From 641370ac0421ff7e09a98f0bb6b2e104b4c081a2 Mon Sep 17 00:00:00 2001
From: Daniel K
Date: Mon, 29 Jul 2024 19:54:26 -0700
Subject: [PATCH] revert: deprecated eshipper_xml and usps* extensions

---
 modules/connectors/eshipper_xml/README.md | 30 +
 modules/connectors/eshipper_xml/generate | 12 +
 .../karrio/mappers/eshipper_xml/__init__.py | 21 +
 .../karrio/mappers/eshipper_xml/mapper.py | 55 +
 .../karrio/mappers/eshipper_xml/proxy.py | 38 +
 .../karrio/mappers/eshipper_xml/settings.py | 19 +
 .../karrio/providers/eshipper_xml/__init__.py | 9 +
 .../karrio/providers/eshipper_xml/error.py | 19 +
 .../karrio/providers/eshipper_xml/quote.py | 175 +
 .../karrio/providers/eshipper_xml/shipping.py | 313 +
 .../karrio/providers/eshipper_xml/units.py | 264 +
 .../karrio/providers/eshipper_xml/utils.py | 37 +
 .../providers/eshipper_xml/void_shipment.py | 50 +
 .../karrio/schemas/eshipper_xml/__init__.py | 0
 .../karrio/schemas/eshipper_xml/error.py | 1859 ++++
 .../schemas/eshipper_xml/quote_reply.py | 2362 +++++
 .../schemas/eshipper_xml/quote_request.py | 4906 ++++++++++
 .../eshipper_xml/shipment_cancel_reply.py | 2063 +++++
 .../eshipper_xml/shipment_cancel_request.py | 1915 ++++
 .../schemas/eshipper_xml/shipping_reply.py | 3836 ++++++++
 .../schemas/eshipper_xml/shipping_request.py | 7896 +++++++++++++++++
 modules/connectors/eshipper_xml/setup.py | 25 +
 .../connectors/eshipper_xml/tests/__init__.py | 1 +
 .../tests/eshipper_xml/__init__.py | 2 +
 .../tests/eshipper_xml/fixture.py | 8 +
 .../tests/eshipper_xml/test_rate.py | 206 +
 .../tests/eshipper_xml/test_shipment.py | 233 +
 .../documentation/eShipper API v3.2.1.pdf | Bin 0 -> 336078 bytes
 .../vendor/sample/sample_quote_reply.xml | 18 +
 .../vendor/sample/sample_quote_request.xml | 14 +
 .../sample/sample_shipment_cancel_reply.xml | 6 +
 .../sample/sample_shipment_cancel_request.xml | 5 +
 .../vendor/sample/sample_shipping_reply.xml | 26 +
 .../vendor/sample/sample_shipping_request.xml | 28 +
 .../eshipper_xml/vendor/schemas/error.xsd | 24 +
 .../vendor/schemas/quote_reply.xsd | 42 +
 .../vendor/schemas/quote_request.xsd | 148 +
 .../vendor/schemas/shipment_cancel_reply.xsd | 34 +
 .../schemas/shipment_cancel_request.xsd | 26 +
 .../vendor/schemas/shipping_reply.xsd | 106 +
 .../vendor/schemas/shipping_request.xsd | 276 +
 modules/connectors/usps/README.md | 3 +-
 modules/connectors/usps/generate | 86 +-
 .../usps/karrio/mappers/usps/__init__.py | 7 +-
 .../usps/karrio/mappers/usps/mapper.py | 179 +-
 .../usps/karrio/mappers/usps/proxy.py | 173 +-
 .../usps/karrio/mappers/usps/settings.py | 17 +-
 .../usps/karrio/providers/usps/__init__.py | 22 +-
 .../usps/karrio/providers/usps/error.py | 34 +-
 .../karrio/providers/usps/pickup/__init__.py | 1 -
 .../karrio/providers/usps/pickup/cancel.py | 58 +-
 .../karrio/providers/usps/pickup/create.py | 136 +-
 .../karrio/providers/usps/pickup/update.py | 144 +-
 .../usps/karrio/providers/usps/rate.py | 217 +-
 .../providers/usps/shipment/__init__.py | 11 +-
 .../karrio/providers/usps/shipment/cancel.py | 76 +-
 .../karrio/providers/usps/shipment/create.py | 380 +-
 .../usps/karrio/providers/usps/tracking.py | 115 +-
 .../usps/karrio/providers/usps/units.py | 551 +-
 .../usps/karrio/providers/usps/utils.py | 90 +-
 .../schemas/usps/address_validate_request.py | 1563 ++++
 .../schemas/usps/address_validate_response.py | 1722 ++++
 .../carrier_pickup_availability_request.py | 1471 +++
 .../carrier_pickup_availability_response.py | 1481 ++++
 .../usps/carrier_pickup_cancel_request.py | 1471 +++
 .../usps/carrier_pickup_cancel_response.py | 1754 ++++
.../usps/carrier_pickup_change_request.py | 1744 ++++ .../usps/carrier_pickup_change_response.py | 1754 ++++ .../usps/carrier_pickup_inquiry_request.py | 1471 +++ .../usps/carrier_pickup_inquiry_response.py | 1754 ++++ .../usps/carrier_pickup_schedule_request.py | 1727 ++++ .../usps/carrier_pickup_schedule_response.py | 1754 ++++ .../schemas/usps/city_state_lookup_request.py | 1440 +++ .../usps/city_state_lookup_response.py | 1459 +++ .../schemas/usps/emrsv4_0_bulk_request.py | 1846 ++++ .../usps/karrio/schemas/usps/error.py | 1379 +++ .../karrio/schemas/usps/evs_cancel_request.py | 1335 +++ .../schemas/usps/evs_cancel_response.py | 1345 +++ .../usps/evs_express_mail_intl_request.py | 3343 +++++++ .../usps/evs_express_mail_intl_response.py | 1600 ++++ .../usps/evs_first_class_mail_intl_request.py | 3174 +++++++ .../evs_first_class_mail_intl_response.py | 1746 ++++ .../schemas/usps/evs_gxg_get_label_request.py | 3352 +++++++ .../usps/evs_gxg_get_label_response.py | 1694 ++++ .../usps/evs_priority_mail_intl_request.py | 3446 +++++++ .../usps/evs_priority_mail_intl_response.py | 1831 ++++ .../usps/karrio/schemas/usps/evs_request.py | 4034 +++++++++ .../usps/karrio/schemas/usps/evs_response.py | 1979 +++++ .../schemas/usps/evsi_cancel_request.py | 1335 +++ .../schemas/usps/evsi_cancel_response.py | 1345 +++ .../usps/express_mail_commitment_request.py | 1437 +++ .../usps/express_mail_commitment_response.py | 1846 ++++ .../schemas/usps/first_class_mail_request.py | 1403 +++ .../schemas/usps/first_class_mail_response.py | 1396 +++ .../schemas/usps/hfp_facility_info_request.py | 1403 +++ .../usps/hfp_facility_info_response.py | 1601 ++++ .../schemas/usps/intl_rate_v2_request.py | 2150 +++++ .../schemas/usps/intl_rate_v2_response.py | 2877 ++++++ .../karrio/schemas/usps/mrsv4_0_request.py | 1828 ++++ .../schemas/usps/priority_mail_request.py | 1403 +++ .../schemas/usps/priority_mail_response.py | 1396 +++ .../karrio/schemas/usps/pts_email_request.py | 1497 ++++ .../karrio/schemas/usps/pts_emailresult.py | 1328 +++ .../usps/karrio/schemas/usps/ptspod_result.py | 1328 +++ .../usps/karrio/schemas/usps/ptsrre_result.py | 1328 +++ .../karrio/schemas/usps/ptstpod_request.py | 1522 ++++ .../karrio/schemas/usps/ptstpod_result.py | 1328 +++ .../karrio/schemas/usps/rate_v4_request.py | 2216 +++++ .../karrio/schemas/usps/rate_v4_response.py | 2719 ++++++ .../usps/karrio/schemas/usps/scan_request.py | 1855 ++++ .../usps/karrio/schemas/usps/scan_response.py | 1536 ++++ .../schemas/usps/sdc_get_locations_request.py | 1437 +++ .../usps/sdc_get_locations_response.py | 3217 +++++++ .../karrio/schemas/usps/standard_b_request.py | 1386 +++ .../schemas/usps/standard_b_response.py | 1396 +++ .../schemas/usps/track_field_request.py | 1520 ++++ .../usps/karrio/schemas/usps/track_request.py | 1432 +++ .../karrio/schemas/usps/track_response.py | 3349 +++++++ .../schemas/usps/zip_code_lookup_request.py | 1542 ++++ .../schemas/usps/zip_code_lookup_response.py | 1544 ++++ .../usps/schemas/AddressValidateRequest.xsd | 25 + .../usps/schemas/AddressValidateResponse.xsd | 34 + .../CarrierPickupAvailabilityRequest.xsd | 19 + .../CarrierPickupAvailabilityResponse.xsd | 19 + .../schemas/CarrierPickupCancelRequest.xsd | 19 + .../schemas/CarrierPickupCancelResponse.xsd | 35 + .../schemas/CarrierPickupChangeRequest.xsd | 35 + .../schemas/CarrierPickupChangeResponse.xsd | 35 + .../schemas/CarrierPickupInquiryRequest.xsd | 19 + .../schemas/CarrierPickupInquiryResponse.xsd | 35 + .../schemas/CarrierPickupScheduleRequest.xsd | 34 + 
.../schemas/CarrierPickupScheduleResponse.xsd | 35 + .../usps/schemas/CityStateLookupRequest.xsd | 18 + .../usps/schemas/CityStateLookupResponse.xsd | 18 + .../usps/schemas/EMRSV4.0BulkRequest.xsd | 41 + modules/connectors/usps/schemas/Error.xsd | 13 + .../schemas/ExpressMailCommitmentRequest.xsd | 18 + .../schemas/ExpressMailCommitmentResponse.xsd | 41 + .../usps/schemas/FirstClassMailRequest.xsd | 16 + .../usps/schemas/FirstClassMailResponse.xsd | 15 + .../usps/schemas/HFPFacilityInfoRequest.xsd | 16 + .../usps/schemas/HFPFacilityInfoResponse.xsd | 27 + .../usps/schemas/IntlRateV2Request.xsd | 73 + .../usps/schemas/IntlRateV2Response.xsd | 104 + .../usps/schemas/MRSV4.0Request.xsd | 41 + .../usps/schemas/PTSEmailRequest.xsd | 21 + .../usps/schemas/PTSEmailResult.xsd | 11 + .../connectors/usps/schemas/PTSPODRequest.xsd | 30 + .../connectors/usps/schemas/PTSPODResult.xsd | 11 + .../connectors/usps/schemas/PTSRRERequest.xsd | 22 + .../connectors/usps/schemas/PTSRREResult.xsd | 11 + .../connectors/usps/schemas/PTSTPODResult.xsd | 11 + .../usps/schemas/PTSTPodRequest.xsd | 23 + .../usps/schemas/PriorityMailRequest.xsd | 16 + .../usps/schemas/PriorityMailResponse.xsd | 15 + .../connectors/usps/schemas/RateV4Request.xsd | 81 + .../usps/schemas/RateV4Response.xsd | 85 + .../connectors/usps/schemas/SCANRequest.xsd | 44 + .../connectors/usps/schemas/SCANResponse.xsd | 32 + .../usps/schemas/SDCGetLocationsRequest.xsd | 18 + .../usps/schemas/SDCGetLocationsResponse.xsd | 124 + .../usps/schemas/StandardBRequest.xsd | 15 + .../usps/schemas/StandardBResponse.xsd | 15 + .../usps/schemas/TrackFieldRequest.xsd | 28 + .../connectors/usps/schemas/TrackRequest.xsd | 20 + .../connectors/usps/schemas/TrackResponse.xsd | 132 + .../usps/schemas/ZipCodeLookupRequest.xsd | 25 + .../usps/schemas/ZipCodeLookupResponse.xsd | 24 + .../usps/schemas/eVSCancelRequest.xsd | 11 + .../usps/schemas/eVSCancelResponse.xsd | 11 + .../schemas/eVSExpressMailIntlRequest.xsd | 131 + .../schemas/eVSExpressMailIntlResponse.xsd | 27 + .../schemas/eVSFirstClassMailIntlRequest.xsd | 121 + .../schemas/eVSFirstClassMailIntlResponse.xsd | 36 + .../usps/schemas/eVSGXGGetLabelRequest.xsd | 132 + .../usps/schemas/eVSGXGGetLabelResponse.xsd | 33 + .../usps/schemas/eVSICancelRequest.xsd | 12 + .../usps/schemas/eVSICancelResponse.xsd | 12 + .../schemas/eVSPriorityMailIntlRequest.xsd | 137 + .../schemas/eVSPriorityMailIntlResponse.xsd | 41 + .../connectors/usps/schemas/eVSRequest.xsd | 177 + .../connectors/usps/schemas/eVSResponse.xsd | 53 + modules/connectors/usps/setup.py | 6 +- modules/connectors/usps/tests/__init__.py | 5 +- modules/connectors/usps/tests/usps/fixture.py | 30 +- .../connectors/usps/tests/usps/test_rate.py | 320 +- .../usps/tests/usps/test_shipment.py | 409 +- .../usps/tests/usps/test_tracking.py | 288 +- .../connectors/usps_international/README.md | 9 +- .../connectors/usps_international/generate | 86 +- .../mappers/usps_international/__init__.py | 7 +- .../mappers/usps_international/mapper.py | 169 +- .../mappers/usps_international/proxy.py | 177 +- .../mappers/usps_international/settings.py | 21 +- .../providers/usps_international/__init__.py | 22 +- .../providers/usps_international/error.py | 34 +- .../usps_international/pickup/__init__.py | 15 +- .../usps_international/pickup/cancel.py | 58 +- .../usps_international/pickup/create.py | 147 +- .../usps_international/pickup/update.py | 150 +- .../providers/usps_international/rate.py | 197 +- .../usps_international/shipment/__init__.py | 10 +- 
.../usps_international/shipment/cancel.py | 76 +- .../usps_international/shipment/create.py | 273 +- .../shipment/first_class_mail.py | 177 + .../shipment/global_express_guaranteed.py | 186 + .../shipment/priority_express.py | 191 + .../shipment/priority_mail.py | 200 + .../providers/usps_international/tracking.py | 121 +- .../providers/usps_international/units.py | 437 +- .../providers/usps_international/utils.py | 90 +- .../address_validate_request.py | 1563 ++++ .../address_validate_response.py | 1722 ++++ .../carrier_pickup_availability_request.py | 1471 +++ .../carrier_pickup_availability_response.py | 1481 ++++ .../carrier_pickup_cancel_request.py | 1471 +++ .../carrier_pickup_cancel_response.py | 1754 ++++ .../carrier_pickup_change_request.py | 1744 ++++ .../carrier_pickup_change_response.py | 1754 ++++ .../carrier_pickup_inquiry_request.py | 1471 +++ .../carrier_pickup_inquiry_response.py | 1754 ++++ .../carrier_pickup_schedule_request.py | 1727 ++++ .../carrier_pickup_schedule_response.py | 1754 ++++ .../city_state_lookup_request.py | 1440 +++ .../city_state_lookup_response.py | 1459 +++ .../emrsv4_0_bulk_request.py | 1846 ++++ .../schemas/usps_international/error.py | 1379 +++ .../usps_international/evs_cancel_request.py | 1335 +++ .../usps_international/evs_cancel_response.py | 1345 +++ .../evs_express_mail_intl_request.py | 3343 +++++++ .../evs_express_mail_intl_response.py | 1600 ++++ .../evs_first_class_mail_intl_request.py | 3174 +++++++ .../evs_first_class_mail_intl_response.py | 1746 ++++ .../evs_gxg_get_label_request.py | 3352 +++++++ .../evs_gxg_get_label_response.py | 1694 ++++ .../evs_priority_mail_intl_request.py | 3446 +++++++ .../evs_priority_mail_intl_response.py | 1831 ++++ .../schemas/usps_international/evs_request.py | 4034 +++++++++ .../usps_international/evs_response.py | 1979 +++++ .../usps_international/evsi_cancel_request.py | 1335 +++ .../evsi_cancel_response.py | 1345 +++ .../express_mail_commitment_request.py | 1437 +++ .../express_mail_commitment_response.py | 1846 ++++ .../first_class_mail_request.py | 1403 +++ .../first_class_mail_response.py | 1396 +++ .../hfp_facility_info_request.py | 1403 +++ .../hfp_facility_info_response.py | 1601 ++++ .../intl_rate_v2_request.py | 2150 +++++ .../intl_rate_v2_response.py | 2877 ++++++ .../usps_international/mrsv4_0_request.py | 1828 ++++ .../priority_mail_request.py | 1403 +++ .../priority_mail_response.py | 1396 +++ .../usps_international/pts_email_request.py | 1497 ++++ .../usps_international/pts_emailresult.py | 1328 +++ .../usps_international/ptspod_result.py | 1328 +++ .../usps_international/ptsrre_result.py | 1328 +++ .../usps_international/ptstpod_request.py | 1522 ++++ .../usps_international/ptstpod_result.py | 1328 +++ .../usps_international/rate_v4_request.py | 2216 +++++ .../usps_international/rate_v4_response.py | 2719 ++++++ .../usps_international/scan_request.py | 1855 ++++ .../usps_international/scan_response.py | 1536 ++++ .../sdc_get_locations_request.py | 1437 +++ .../sdc_get_locations_response.py | 3217 +++++++ .../usps_international/standard_b_request.py | 1386 +++ .../usps_international/standard_b_response.py | 1396 +++ .../usps_international/track_field_request.py | 1520 ++++ .../usps_international/track_request.py | 1432 +++ .../usps_international/track_response.py | 3349 +++++++ .../zip_code_lookup_request.py | 1542 ++++ .../zip_code_lookup_response.py | 1544 ++++ .../schemas/AddressValidateRequest.xsd | 25 + .../schemas/AddressValidateResponse.xsd | 34 + 
.../CarrierPickupAvailabilityRequest.xsd | 19 + .../CarrierPickupAvailabilityResponse.xsd | 19 + .../schemas/CarrierPickupCancelRequest.xsd | 19 + .../schemas/CarrierPickupCancelResponse.xsd | 35 + .../schemas/CarrierPickupChangeRequest.xsd | 35 + .../schemas/CarrierPickupChangeResponse.xsd | 35 + .../schemas/CarrierPickupInquiryRequest.xsd | 19 + .../schemas/CarrierPickupInquiryResponse.xsd | 35 + .../schemas/CarrierPickupScheduleRequest.xsd | 34 + .../schemas/CarrierPickupScheduleResponse.xsd | 35 + .../schemas/CityStateLookupRequest.xsd | 18 + .../schemas/CityStateLookupResponse.xsd | 18 + .../schemas/EMRSV4.0BulkRequest.xsd | 41 + .../usps_international/schemas/Error.xsd | 13 + .../schemas/ExpressMailCommitmentRequest.xsd | 18 + .../schemas/ExpressMailCommitmentResponse.xsd | 41 + .../schemas/FirstClassMailRequest.xsd | 16 + .../schemas/FirstClassMailResponse.xsd | 15 + .../schemas/HFPFacilityInfoRequest.xsd | 16 + .../schemas/HFPFacilityInfoResponse.xsd | 27 + .../schemas/IntlRateV2Request.xsd | 73 + .../schemas/IntlRateV2Response.xsd | 104 + .../schemas/MRSV4.0Request.xsd | 41 + .../schemas/PTSEmailRequest.xsd | 21 + .../schemas/PTSEmailResult.xsd | 11 + .../schemas/PTSPODRequest.xsd | 30 + .../schemas/PTSPODResult.xsd | 11 + .../schemas/PTSRRERequest.xsd | 22 + .../schemas/PTSRREResult.xsd | 11 + .../schemas/PTSTPODResult.xsd | 11 + .../schemas/PTSTPodRequest.xsd | 23 + .../schemas/PriorityMailRequest.xsd | 16 + .../schemas/PriorityMailResponse.xsd | 15 + .../schemas/RateV4Request.xsd | 81 + .../schemas/RateV4Response.xsd | 85 + .../schemas/SCANRequest.xsd | 44 + .../schemas/SCANResponse.xsd | 32 + .../schemas/SDCGetLocationsRequest.xsd | 18 + .../schemas/SDCGetLocationsResponse.xsd | 124 + .../schemas/StandardBRequest.xsd | 15 + .../schemas/StandardBResponse.xsd | 15 + .../schemas/TrackFieldRequest.xsd | 28 + .../schemas/TrackRequest.xsd | 20 + .../schemas/TrackResponse.xsd | 132 + .../schemas/ZipCodeLookupRequest.xsd | 25 + .../schemas/ZipCodeLookupResponse.xsd | 24 + .../schemas/eVSCancelRequest.xsd | 11 + .../schemas/eVSCancelResponse.xsd | 11 + .../schemas/eVSExpressMailIntlRequest.xsd | 131 + .../schemas/eVSExpressMailIntlResponse.xsd | 27 + .../schemas/eVSFirstClassMailIntlRequest.xsd | 121 + .../schemas/eVSFirstClassMailIntlResponse.xsd | 36 + .../schemas/eVSGXGGetLabelRequest.xsd | 132 + .../schemas/eVSGXGGetLabelResponse.xsd | 33 + .../schemas/eVSICancelRequest.xsd | 12 + .../schemas/eVSICancelResponse.xsd | 12 + .../schemas/eVSPriorityMailIntlRequest.xsd | 137 + .../schemas/eVSPriorityMailIntlResponse.xsd | 41 + .../usps_international/schemas/eVSRequest.xsd | 177 + .../schemas/eVSResponse.xsd | 53 + .../connectors/usps_international/setup.py | 6 +- .../usps_international/tests/__init__.py | 4 +- .../tests/usps_international/fixture.py | 30 +- .../tests/usps_international/test_rate.py | 322 +- .../test_shipment/__init__.py | 0 .../test_shipment/test_first_class.py | 198 + .../test_global_express_guaranteed.py | 186 + .../test_shipment/test_priority_express.py | 203 + .../test_shipment/test_priority_mail.py | 262 + .../tests/usps_international/test_tracking.py | 288 +- modules/connectors/usps_rest/README.md | 31 + modules/connectors/usps_rest/generate | 24 + .../karrio/mappers/usps_rest/__init__.py | 22 + .../karrio/mappers/usps_rest/mapper.py | 88 + .../karrio/mappers/usps_rest/proxy.py | 151 + .../karrio/mappers/usps_rest/settings.py | 23 + .../karrio/providers/usps_rest/__init__.py | 26 + .../karrio/providers/usps_rest/error.py | 26 + 
.../karrio/providers/usps_rest}/manifest.py | 18 +- .../providers/usps_rest/pickup/__init__.py | 12 + .../providers/usps_rest/pickup/cancel.py | 40 + .../providers/usps_rest/pickup/create.py | 102 + .../providers/usps_rest/pickup/update.py | 109 + .../karrio/providers/usps_rest/rate.py | 135 + .../providers/usps_rest/shipment/__init__.py | 8 + .../providers/usps_rest/shipment/cancel.py | 53 + .../providers/usps_rest/shipment/create.py | 263 + .../karrio/providers/usps_rest/tracking.py | 99 + .../karrio/providers/usps_rest/units.py | 198 + .../karrio/providers/usps_rest/utils.py | 87 + .../karrio/schemas/usps_rest/__init__.py | 0 .../schemas/usps_rest}/error_response.py | 0 .../schemas/usps_rest}/label_request.py | 0 .../schemas/usps_rest}/label_response.py | 0 .../schemas/usps_rest}/pickup_request.py | 0 .../schemas/usps_rest}/pickup_response.py | 0 .../usps_rest}/pickup_update_request.py | 0 .../usps_rest}/pickup_update_response.py | 0 .../karrio/schemas/usps_rest}/rate_request.py | 0 .../schemas/usps_rest}/rate_response.py | 0 .../schemas/usps_rest}/scan_form_request.py | 0 .../schemas/usps_rest}/scan_form_response.py | 0 .../schemas/usps_rest}/tracking_response.py | 0 .../schemas/error_response.json | 0 .../schemas/label_request.json | 0 .../schemas/label_response.json | 0 .../schemas/pickup_request.json | 0 .../schemas/pickup_response.json | 0 .../schemas/pickup_update_request.json | 0 .../schemas/pickup_update_response.json | 0 .../schemas/rate_request.json | 0 .../schemas/rate_response.json | 0 .../schemas/scan_form_request.json | 0 .../schemas/scan_form_response.json | 0 .../schemas/tracking_response.json | 0 modules/connectors/usps_rest/setup.py | 27 + .../connectors/usps_rest/tests/__init__.py | 5 + .../usps_rest/tests/usps_rest/__init__.py | 0 .../usps_rest/tests/usps_rest/fixture.py | 33 + .../tests/usps_rest}/test_manifest.py | 8 +- .../tests/usps_rest}/test_pickup.py | 18 +- .../usps_rest/tests/usps_rest/test_rate.py | 168 + .../tests/usps_rest/test_shipment.py | 328 + .../tests/usps_rest/test_tracking.py | 225 + .../usps_rest_international/README.md | 31 + .../usps_rest_international/generate | 24 + .../usps_rest_international/__init__.py | 22 + .../mappers/usps_rest_international/mapper.py | 88 + .../mappers/usps_rest_international/proxy.py | 151 + .../usps_rest_international/settings.py | 23 + .../usps_rest_international/__init__.py | 29 + .../usps_rest_international/error.py | 26 + .../usps_rest_international}/manifest.py | 11 +- .../pickup/__init__.py | 12 + .../usps_rest_international/pickup/cancel.py | 40 + .../usps_rest_international/pickup/create.py | 102 + .../usps_rest_international/pickup/update.py | 109 + .../providers/usps_rest_international/rate.py | 122 + .../shipment/__init__.py | 8 + .../shipment/cancel.py | 53 + .../shipment/create.py | 243 + .../usps_rest_international/tracking.py | 99 + .../usps_rest_international/units.py | 204 + .../usps_rest_international/utils.py | 87 + .../usps_rest_international/__init__.py | 0 .../error_response.py | 0 .../usps_rest_international}/label_request.py | 0 .../label_response.py | 0 .../pickup_request.py | 0 .../pickup_response.py | 0 .../pickup_update_request.py | 0 .../pickup_update_response.py | 0 .../usps_rest_international}/rate_request.py | 0 .../usps_rest_international}/rate_response.py | 0 .../scan_form_request.py | 0 .../scan_form_response.py | 0 .../tracking_response.py | 0 .../schemas/error_response.json | 0 .../schemas/label_request.json | 0 .../schemas/label_response.json | 0 .../schemas/pickup_request.json | 
0 .../schemas/pickup_response.json | 0 .../schemas/pickup_update_request.json | 0 .../schemas/pickup_update_response.json | 0 .../schemas/rate_request.json | 0 .../schemas/rate_response.json | 0 .../schemas/scan_form_request.json | 0 .../schemas/scan_form_response.json | 0 .../schemas/tracking_response.json | 0 .../usps_rest_international/setup.py | 27 + .../usps_rest_international/tests/__init__.py | 5 + .../tests/usps_rest_international/__init__.py | 0 .../tests/usps_rest_international/fixture.py | 33 + .../usps_rest_international}/test_manifest.py | 8 +- .../usps_rest_international}/test_pickup.py | 18 +- .../usps_rest_international/test_rate.py | 169 + .../usps_rest_international}/test_shipment.py | 16 +- .../usps_rest_international/test_tracking.py | 225 + .../extension/models/eshipper_xml.py | 21 + .../server/providers/extension/models/usps.py | 21 +- .../extension/models/usps_international.py | 17 +- .../providers/extension/models/usps_rest.py | 23 + .../models/usps_rest_international.py | 23 + ..._uspsrestinternationalsettings_and_more.py | 184 + .../migrations/0078_auto_20240730_0153.py | 40 + ...harge_carriers_alter_surcharge_services.py | 3899 ++++++++ requirements.build.txt | 3 + requirements.sdk.dev.txt | 3 + requirements.server.dev.txt | 3 + source.requirements.txt | 3 + 463 files changed, 264240 insertions(+), 3333 deletions(-) create mode 100644 modules/connectors/eshipper_xml/README.md create mode 100755 modules/connectors/eshipper_xml/generate create mode 100644 modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/__init__.py create mode 100644 modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/mapper.py create mode 100644 modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/proxy.py create mode 100644 modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/settings.py create mode 100644 modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/__init__.py create mode 100644 modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/error.py create mode 100644 modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/quote.py create mode 100644 modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/shipping.py create mode 100644 modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/units.py create mode 100644 modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/utils.py create mode 100644 modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/void_shipment.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/__init__.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/error.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_reply.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_request.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_reply.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_request.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_reply.py create mode 100644 modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_request.py create mode 100644 modules/connectors/eshipper_xml/setup.py create mode 100644 modules/connectors/eshipper_xml/tests/__init__.py create mode 100644 modules/connectors/eshipper_xml/tests/eshipper_xml/__init__.py create mode 100644 
modules/connectors/eshipper_xml/tests/eshipper_xml/fixture.py create mode 100644 modules/connectors/eshipper_xml/tests/eshipper_xml/test_rate.py create mode 100644 modules/connectors/eshipper_xml/tests/eshipper_xml/test_shipment.py create mode 100644 modules/connectors/eshipper_xml/vendor/documentation/eShipper API v3.2.1.pdf create mode 100644 modules/connectors/eshipper_xml/vendor/sample/sample_quote_reply.xml create mode 100644 modules/connectors/eshipper_xml/vendor/sample/sample_quote_request.xml create mode 100644 modules/connectors/eshipper_xml/vendor/sample/sample_shipment_cancel_reply.xml create mode 100644 modules/connectors/eshipper_xml/vendor/sample/sample_shipment_cancel_request.xml create mode 100644 modules/connectors/eshipper_xml/vendor/sample/sample_shipping_reply.xml create mode 100644 modules/connectors/eshipper_xml/vendor/sample/sample_shipping_request.xml create mode 100644 modules/connectors/eshipper_xml/vendor/schemas/error.xsd create mode 100644 modules/connectors/eshipper_xml/vendor/schemas/quote_reply.xsd create mode 100644 modules/connectors/eshipper_xml/vendor/schemas/quote_request.xsd create mode 100644 modules/connectors/eshipper_xml/vendor/schemas/shipment_cancel_reply.xsd create mode 100644 modules/connectors/eshipper_xml/vendor/schemas/shipment_cancel_request.xsd create mode 100644 modules/connectors/eshipper_xml/vendor/schemas/shipping_reply.xsd create mode 100644 modules/connectors/eshipper_xml/vendor/schemas/shipping_request.xsd create mode 100644 modules/connectors/usps/karrio/schemas/usps/address_validate_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/address_validate_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/city_state_lookup_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/city_state_lookup_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/emrsv4_0_bulk_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/error.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_cancel_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_cancel_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_request.py create mode 100644 
modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evs_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evsi_cancel_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/evsi_cancel_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/first_class_mail_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/first_class_mail_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/mrsv4_0_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/priority_mail_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/priority_mail_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/pts_email_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/pts_emailresult.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/ptspod_result.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/ptsrre_result.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/ptstpod_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/ptstpod_result.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/rate_v4_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/rate_v4_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/scan_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/scan_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/standard_b_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/standard_b_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/track_field_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/track_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/track_response.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_request.py create mode 100644 modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_response.py create mode 100644 modules/connectors/usps/schemas/AddressValidateRequest.xsd create mode 100644 modules/connectors/usps/schemas/AddressValidateResponse.xsd create 
mode 100644 modules/connectors/usps/schemas/CarrierPickupAvailabilityRequest.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupAvailabilityResponse.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupCancelRequest.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupCancelResponse.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupChangeRequest.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupChangeResponse.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupInquiryRequest.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupInquiryResponse.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupScheduleRequest.xsd create mode 100644 modules/connectors/usps/schemas/CarrierPickupScheduleResponse.xsd create mode 100644 modules/connectors/usps/schemas/CityStateLookupRequest.xsd create mode 100644 modules/connectors/usps/schemas/CityStateLookupResponse.xsd create mode 100644 modules/connectors/usps/schemas/EMRSV4.0BulkRequest.xsd create mode 100644 modules/connectors/usps/schemas/Error.xsd create mode 100644 modules/connectors/usps/schemas/ExpressMailCommitmentRequest.xsd create mode 100644 modules/connectors/usps/schemas/ExpressMailCommitmentResponse.xsd create mode 100644 modules/connectors/usps/schemas/FirstClassMailRequest.xsd create mode 100644 modules/connectors/usps/schemas/FirstClassMailResponse.xsd create mode 100644 modules/connectors/usps/schemas/HFPFacilityInfoRequest.xsd create mode 100644 modules/connectors/usps/schemas/HFPFacilityInfoResponse.xsd create mode 100644 modules/connectors/usps/schemas/IntlRateV2Request.xsd create mode 100644 modules/connectors/usps/schemas/IntlRateV2Response.xsd create mode 100644 modules/connectors/usps/schemas/MRSV4.0Request.xsd create mode 100644 modules/connectors/usps/schemas/PTSEmailRequest.xsd create mode 100644 modules/connectors/usps/schemas/PTSEmailResult.xsd create mode 100644 modules/connectors/usps/schemas/PTSPODRequest.xsd create mode 100644 modules/connectors/usps/schemas/PTSPODResult.xsd create mode 100644 modules/connectors/usps/schemas/PTSRRERequest.xsd create mode 100644 modules/connectors/usps/schemas/PTSRREResult.xsd create mode 100644 modules/connectors/usps/schemas/PTSTPODResult.xsd create mode 100644 modules/connectors/usps/schemas/PTSTPodRequest.xsd create mode 100644 modules/connectors/usps/schemas/PriorityMailRequest.xsd create mode 100644 modules/connectors/usps/schemas/PriorityMailResponse.xsd create mode 100644 modules/connectors/usps/schemas/RateV4Request.xsd create mode 100644 modules/connectors/usps/schemas/RateV4Response.xsd create mode 100644 modules/connectors/usps/schemas/SCANRequest.xsd create mode 100644 modules/connectors/usps/schemas/SCANResponse.xsd create mode 100644 modules/connectors/usps/schemas/SDCGetLocationsRequest.xsd create mode 100644 modules/connectors/usps/schemas/SDCGetLocationsResponse.xsd create mode 100644 modules/connectors/usps/schemas/StandardBRequest.xsd create mode 100644 modules/connectors/usps/schemas/StandardBResponse.xsd create mode 100644 modules/connectors/usps/schemas/TrackFieldRequest.xsd create mode 100644 modules/connectors/usps/schemas/TrackRequest.xsd create mode 100644 modules/connectors/usps/schemas/TrackResponse.xsd create mode 100644 modules/connectors/usps/schemas/ZipCodeLookupRequest.xsd create mode 100644 modules/connectors/usps/schemas/ZipCodeLookupResponse.xsd create mode 100644 modules/connectors/usps/schemas/eVSCancelRequest.xsd create mode 
100644 modules/connectors/usps/schemas/eVSCancelResponse.xsd create mode 100644 modules/connectors/usps/schemas/eVSExpressMailIntlRequest.xsd create mode 100644 modules/connectors/usps/schemas/eVSExpressMailIntlResponse.xsd create mode 100644 modules/connectors/usps/schemas/eVSFirstClassMailIntlRequest.xsd create mode 100644 modules/connectors/usps/schemas/eVSFirstClassMailIntlResponse.xsd create mode 100644 modules/connectors/usps/schemas/eVSGXGGetLabelRequest.xsd create mode 100644 modules/connectors/usps/schemas/eVSGXGGetLabelResponse.xsd create mode 100644 modules/connectors/usps/schemas/eVSICancelRequest.xsd create mode 100644 modules/connectors/usps/schemas/eVSICancelResponse.xsd create mode 100644 modules/connectors/usps/schemas/eVSPriorityMailIntlRequest.xsd create mode 100644 modules/connectors/usps/schemas/eVSPriorityMailIntlResponse.xsd create mode 100644 modules/connectors/usps/schemas/eVSRequest.xsd create mode 100644 modules/connectors/usps/schemas/eVSResponse.xsd create mode 100644 modules/connectors/usps_international/karrio/providers/usps_international/shipment/first_class_mail.py create mode 100644 modules/connectors/usps_international/karrio/providers/usps_international/shipment/global_express_guaranteed.py create mode 100644 modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_express.py create mode 100644 modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_mail.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/emrsv4_0_bulk_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/error.py create mode 100644 
modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evs_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/mrsv4_0_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/pts_email_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/pts_emailresult.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/ptspod_result.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/ptsrre_result.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_request.py create mode 100644 
modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_result.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/scan_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/scan_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/track_field_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/track_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/track_response.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_request.py create mode 100644 modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_response.py create mode 100644 modules/connectors/usps_international/schemas/AddressValidateRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/AddressValidateResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupAvailabilityRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupAvailabilityResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupCancelRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupCancelResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupChangeRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupChangeResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupInquiryRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupInquiryResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupScheduleRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/CarrierPickupScheduleResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/CityStateLookupRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/CityStateLookupResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/EMRSV4.0BulkRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/Error.xsd create mode 100644 modules/connectors/usps_international/schemas/ExpressMailCommitmentRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/ExpressMailCommitmentResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/FirstClassMailRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/FirstClassMailResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/HFPFacilityInfoRequest.xsd 
create mode 100644 modules/connectors/usps_international/schemas/HFPFacilityInfoResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/IntlRateV2Request.xsd create mode 100644 modules/connectors/usps_international/schemas/IntlRateV2Response.xsd create mode 100644 modules/connectors/usps_international/schemas/MRSV4.0Request.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSEmailRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSEmailResult.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSPODRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSPODResult.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSRRERequest.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSRREResult.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSTPODResult.xsd create mode 100644 modules/connectors/usps_international/schemas/PTSTPodRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/PriorityMailRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/PriorityMailResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/RateV4Request.xsd create mode 100644 modules/connectors/usps_international/schemas/RateV4Response.xsd create mode 100644 modules/connectors/usps_international/schemas/SCANRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/SCANResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/SDCGetLocationsRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/SDCGetLocationsResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/StandardBRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/StandardBResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/TrackFieldRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/TrackRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/TrackResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/ZipCodeLookupRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/ZipCodeLookupResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSCancelRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSCancelResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSExpressMailIntlRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSExpressMailIntlResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSFirstClassMailIntlRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSFirstClassMailIntlResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSGXGGetLabelRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSGXGGetLabelResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSICancelRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSICancelResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSPriorityMailIntlRequest.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSPriorityMailIntlResponse.xsd create mode 100644 modules/connectors/usps_international/schemas/eVSRequest.xsd create mode 100644 
modules/connectors/usps_international/schemas/eVSResponse.xsd create mode 100644 modules/connectors/usps_international/tests/usps_international/test_shipment/__init__.py create mode 100644 modules/connectors/usps_international/tests/usps_international/test_shipment/test_first_class.py create mode 100644 modules/connectors/usps_international/tests/usps_international/test_shipment/test_global_express_guaranteed.py create mode 100644 modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_express.py create mode 100644 modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_mail.py create mode 100644 modules/connectors/usps_rest/README.md create mode 100755 modules/connectors/usps_rest/generate create mode 100644 modules/connectors/usps_rest/karrio/mappers/usps_rest/__init__.py create mode 100644 modules/connectors/usps_rest/karrio/mappers/usps_rest/mapper.py create mode 100644 modules/connectors/usps_rest/karrio/mappers/usps_rest/proxy.py create mode 100644 modules/connectors/usps_rest/karrio/mappers/usps_rest/settings.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/__init__.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/error.py rename modules/connectors/{usps_international/karrio/providers/usps_international => usps_rest/karrio/providers/usps_rest}/manifest.py (85%) create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/__init__.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/cancel.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/create.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/update.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/rate.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/__init__.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/cancel.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/create.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/tracking.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/units.py create mode 100644 modules/connectors/usps_rest/karrio/providers/usps_rest/utils.py create mode 100644 modules/connectors/usps_rest/karrio/schemas/usps_rest/__init__.py rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/error_response.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/label_request.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/label_response.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/pickup_request.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/pickup_response.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/pickup_update_request.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/pickup_update_response.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/rate_request.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/rate_response.py (100%) 
rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/scan_form_request.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/scan_form_response.py (100%) rename modules/connectors/{usps/karrio/schemas/usps => usps_rest/karrio/schemas/usps_rest}/tracking_response.py (100%) rename modules/connectors/{usps => usps_rest}/schemas/error_response.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/label_request.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/label_response.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/pickup_request.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/pickup_response.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/pickup_update_request.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/pickup_update_response.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/rate_request.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/rate_response.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/scan_form_request.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/scan_form_response.json (100%) rename modules/connectors/{usps => usps_rest}/schemas/tracking_response.json (100%) create mode 100644 modules/connectors/usps_rest/setup.py create mode 100644 modules/connectors/usps_rest/tests/__init__.py create mode 100644 modules/connectors/usps_rest/tests/usps_rest/__init__.py create mode 100644 modules/connectors/usps_rest/tests/usps_rest/fixture.py rename modules/connectors/{usps/tests/usps => usps_rest/tests/usps_rest}/test_manifest.py (98%) rename modules/connectors/{usps/tests/usps => usps_rest/tests/usps_rest}/test_pickup.py (93%) create mode 100644 modules/connectors/usps_rest/tests/usps_rest/test_rate.py create mode 100644 modules/connectors/usps_rest/tests/usps_rest/test_shipment.py create mode 100644 modules/connectors/usps_rest/tests/usps_rest/test_tracking.py create mode 100644 modules/connectors/usps_rest_international/README.md create mode 100755 modules/connectors/usps_rest_international/generate create mode 100644 modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/__init__.py create mode 100644 modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/mapper.py create mode 100644 modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/proxy.py create mode 100644 modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/settings.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/__init__.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/error.py rename modules/connectors/{usps/karrio/providers/usps => usps_rest_international/karrio/providers/usps_rest_international}/manifest.py (90%) create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/__init__.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/cancel.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/create.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/update.py create mode 100644 
modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/rate.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/__init__.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/cancel.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/create.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/tracking.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/units.py create mode 100644 modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/utils.py create mode 100644 modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/__init__.py rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/error_response.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/label_request.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/label_response.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/pickup_request.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/pickup_response.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/pickup_update_request.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/pickup_update_response.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/rate_request.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/rate_response.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/scan_form_request.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/scan_form_response.py (100%) rename modules/connectors/{usps_international/karrio/schemas/usps_international => usps_rest_international/karrio/schemas/usps_rest_international}/tracking_response.py (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/error_response.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/label_request.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/label_response.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/pickup_request.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/pickup_response.json (100%) rename 
modules/connectors/{usps_international => usps_rest_international}/schemas/pickup_update_request.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/pickup_update_response.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/rate_request.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/rate_response.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/scan_form_request.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/scan_form_response.json (100%) rename modules/connectors/{usps_international => usps_rest_international}/schemas/tracking_response.json (100%) create mode 100644 modules/connectors/usps_rest_international/setup.py create mode 100644 modules/connectors/usps_rest_international/tests/__init__.py create mode 100644 modules/connectors/usps_rest_international/tests/usps_rest_international/__init__.py create mode 100644 modules/connectors/usps_rest_international/tests/usps_rest_international/fixture.py rename modules/connectors/{usps_international/tests/usps_international => usps_rest_international/tests/usps_rest_international}/test_manifest.py (98%) rename modules/connectors/{usps_international/tests/usps_international => usps_rest_international/tests/usps_rest_international}/test_pickup.py (92%) create mode 100644 modules/connectors/usps_rest_international/tests/usps_rest_international/test_rate.py rename modules/connectors/{usps_international/tests/usps_international => usps_rest_international/tests/usps_rest_international}/test_shipment.py (93%) create mode 100644 modules/connectors/usps_rest_international/tests/usps_rest_international/test_tracking.py create mode 100644 modules/core/karrio/server/providers/extension/models/eshipper_xml.py create mode 100644 modules/core/karrio/server/providers/extension/models/usps_rest.py create mode 100644 modules/core/karrio/server/providers/extension/models/usps_rest_international.py create mode 100644 modules/core/karrio/server/providers/migrations/0077_eshipperxmlsettings_uspsrestinternationalsettings_and_more.py create mode 100644 modules/core/karrio/server/providers/migrations/0078_auto_20240730_0153.py create mode 100644 modules/pricing/karrio/server/pricing/migrations/0056_alter_surcharge_carriers_alter_surcharge_services.py diff --git a/modules/connectors/eshipper_xml/README.md b/modules/connectors/eshipper_xml/README.md new file mode 100644 index 0000000000..22b454bf93 --- /dev/null +++ b/modules/connectors/eshipper_xml/README.md @@ -0,0 +1,30 @@ +# karrio.eshipper_xml + +This package is a eShipper XML extension of the [karrio](https://pypi.org/project/karrio) multi carrier shipping SDK. + +## Requirements + +`Python 3.7+` + +## Installation + +```bash +pip install karrio.eshipper_xml +``` + +## Usage + +```python +import karrio +from karrio.mappers.eshipper_xml.settings import Settings + + +# Initialize a carrier gateway +eshipper_xml = karrio.gateway["eshipper_xml"].create( + Settings( + ... 
+ ) +) +``` + +Check the [karrio Mutli-carrier SDK docs](https://docs.karrio.io) for Shipping API requests diff --git a/modules/connectors/eshipper_xml/generate b/modules/connectors/eshipper_xml/generate new file mode 100755 index 0000000000..82c5aa00a8 --- /dev/null +++ b/modules/connectors/eshipper_xml/generate @@ -0,0 +1,12 @@ +SCHEMAS=./vendor/schemas +LIB_MODULES=./karrio/schemas/eshipper_xml +find "${LIB_MODULES}" -name "*.py" -exec rm -r {} \; +touch "${LIB_MODULES}/__init__.py" + +generateDS --no-namespace-defs -o "${LIB_MODULES}/quote_request.py" $SCHEMAS/quote_request.xsd +generateDS --no-namespace-defs -o "${LIB_MODULES}/quote_reply.py" $SCHEMAS/quote_reply.xsd +generateDS --no-namespace-defs -o "${LIB_MODULES}/shipping_request.py" $SCHEMAS/shipping_request.xsd +generateDS --no-namespace-defs -o "${LIB_MODULES}/shipping_reply.py" $SCHEMAS/shipping_reply.xsd +generateDS --no-namespace-defs -o "${LIB_MODULES}/error.py" $SCHEMAS/error.xsd +generateDS --no-namespace-defs -o "${LIB_MODULES}/shipment_cancel_request.py" $SCHEMAS/shipment_cancel_request.xsd +generateDS --no-namespace-defs -o "${LIB_MODULES}/shipment_cancel_reply.py" $SCHEMAS/shipment_cancel_reply.xsd diff --git a/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/__init__.py b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/__init__.py new file mode 100644 index 0000000000..88d880039b --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/__init__.py @@ -0,0 +1,21 @@ +from karrio.core.metadata import Metadata + +from karrio.mappers.eshipper_xml.mapper import Mapper +from karrio.mappers.eshipper_xml.proxy import Proxy +from karrio.mappers.eshipper_xml.settings import Settings +import karrio.providers.eshipper_xml.units as units + + +METADATA = Metadata( + id="eshipper_xml", + label="eShipper XML", + is_hub=True, + # Integrations + Mapper=Mapper, + Proxy=Proxy, + Settings=Settings, + # Data Units + options=units.ShippingOption, + services=units.ShippingService, + hub_carriers=units.CARRIER_IDS, +) diff --git a/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/mapper.py b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/mapper.py new file mode 100644 index 0000000000..81a4fa7f3c --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/mapper.py @@ -0,0 +1,55 @@ +from typing import List, Tuple +from karrio.api.mapper import Mapper as BaseMapper +from karrio.mappers.eshipper_xml.settings import Settings +from karrio.core.utils.serializable import Deserializable, Serializable +from karrio.core.models import ( + RateRequest, + ShipmentRequest, + ShipmentDetails, + ShipmentCancelRequest, + RateDetails, + Message, + ConfirmationDetails, +) +from karrio.providers.eshipper_xml import ( + parse_quote_reply, + quote_request, + parse_shipping_reply, + shipping_request, + shipment_cancel_request, + parse_shipment_cancel_reply, +) + + +class Mapper(BaseMapper): + settings: Settings + + # Request Mappers + + def create_rate_request(self, payload: RateRequest) -> Serializable: + return quote_request(payload, self.settings) + + def create_shipment_request(self, payload: ShipmentRequest) -> Serializable: + return shipping_request(payload, self.settings) + + def create_cancel_shipment_request( + self, payload: ShipmentCancelRequest + ) -> Serializable: + return shipment_cancel_request(payload, self.settings) + + # Response Parsers + + def parse_rate_response( + self, response: Deserializable + ) -> Tuple[List[RateDetails], List[Message]]: + return 
parse_quote_reply(response, self.settings) + + def parse_shipment_response( + self, response: Deserializable + ) -> Tuple[ShipmentDetails, List[Message]]: + return parse_shipping_reply(response, self.settings) + + def parse_cancel_shipment_response( + self, response: Deserializable + ) -> Tuple[ConfirmationDetails, List[Message]]: + return parse_shipment_cancel_reply(response, self.settings) diff --git a/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/proxy.py b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/proxy.py new file mode 100644 index 0000000000..5c97b37115 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/proxy.py @@ -0,0 +1,38 @@ +from karrio.core.utils import XP, request as http +from karrio.api.proxy import Proxy as BaseProxy +from karrio.mappers.eshipper_xml.settings import Settings +from karrio.core.utils.serializable import Serializable, Deserializable + + +class Proxy(BaseProxy): + settings: Settings + + def get_rates(self, request: Serializable) -> Deserializable: + response = http( + url=self.settings.server_url, + data=request.serialize(), + trace=self.trace_as("xml"), + method="POST", + headers={"Content-Type": "application/xml"}, + ) + return Deserializable(response, XP.to_xml) + + def create_shipment(self, request: Serializable) -> Deserializable: + response = http( + url=self.settings.server_url, + data=request.serialize(), + trace=self.trace_as("xml"), + method="POST", + headers={"Content-Type": "application/xml"}, + ) + return Deserializable(response, XP.to_xml) + + def cancel_shipment(self, request: Serializable) -> Deserializable: + response = http( + url=self.settings.server_url, + data=request.serialize(), + trace=self.trace_as("xml"), + method="POST", + headers={"Content-Type": "application/xml"}, + ) + return Deserializable(response, XP.to_xml) diff --git a/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/settings.py b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/settings.py new file mode 100644 index 0000000000..0dfb097f31 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/mappers/eshipper_xml/settings.py @@ -0,0 +1,19 @@ +"""Karrio eshipper_xml connection settings.""" + +import attr +from karrio.providers.eshipper_xml.utils import Settings as BaseSettings + + +@attr.s(auto_attribs=True) +class Settings(BaseSettings): + """eshipper_xml connection settings.""" + + username: str + password: str + + id: str = None + test_mode: bool = False + carrier_id: str = "eshipper_xml" + account_country_code: str = None + metadata: dict = {} + config: dict = {} diff --git a/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/__init__.py b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/__init__.py new file mode 100644 index 0000000000..27309cdb05 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/__init__.py @@ -0,0 +1,9 @@ +from karrio.providers.eshipper_xml.quote import parse_quote_reply, quote_request +from karrio.providers.eshipper_xml.shipping import ( + parse_shipping_reply, + shipping_request, +) +from karrio.providers.eshipper_xml.void_shipment import ( + shipment_cancel_request, + parse_shipment_cancel_reply, +) diff --git a/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/error.py b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/error.py new file mode 100644 index 0000000000..8c512ac6cf --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/error.py @@ -0,0 +1,19 @@ 
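The hub Proxy above POSTs every operation (rating, shipping, cancellation) as XML to the single eShipper endpoint, and the Mapper hands the raw responses to the provider parsers that follow. For orientation, here is a minimal usage sketch, not part of this patch: it assumes karrio's fluent `Rating` API and the gateway registration shown in the README earlier in this diff, and every credential and address value is a placeholder.

```python
import karrio
from karrio.core.models import Address, Parcel, RateRequest
from karrio.mappers.eshipper_xml.settings import Settings

# Placeholder credentials; test_mode points server_url at the eShipper test endpoint.
gateway = karrio.gateway["eshipper_xml"].create(
    Settings(username="your-username", password="your-password", test_mode=True)
)

# The mapper turns this payload into the EShipper/QuoteRequest XML and the proxy
# POSTs it; weight and dimensions are required by the quote builder in this diff.
request = RateRequest(
    shipper=Address(city="Toronto", state_code="ON", postal_code="M5V2L7", country_code="CA"),
    recipient=Address(city="Montreal", state_code="QC", postal_code="H2X1Y4", country_code="CA"),
    parcels=[
        Parcel(weight=2.0, weight_unit="LB", length=10, width=10, height=5, dimension_unit="IN")
    ],
)

rates, messages = karrio.Rating.fetch(request).from_(gateway).parse()
```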
+from typing import List +from karrio.schemas.eshipper_xml.error import ErrorType +from karrio.core.models import Message +from karrio.core.utils import Element, XP +from karrio.providers.eshipper_xml.utils import Settings + + +def parse_error_response(response: Element, settings: Settings) -> List[Message]: + errors = XP.find("Error", response, ErrorType) + return [_extract_error(node, settings) for node in errors] + + +def _extract_error(error: ErrorType, settings: Settings) -> Message: + return Message( + code="Error", + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + message=error.Message, + ) diff --git a/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/quote.py b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/quote.py new file mode 100644 index 0000000000..63d14d2a9a --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/quote.py @@ -0,0 +1,175 @@ +from karrio.schemas.eshipper_xml.quote_reply import QuoteType +from karrio.schemas.eshipper_xml.quote_request import ( + EShipper, + QuoteRequestType, + FromType, + ToType, + PackagesType, + PackageType, +) + +import typing +import karrio.lib as lib +import karrio.core.models as models +import karrio.providers.eshipper_xml.error as provider_error +import karrio.providers.eshipper_xml.units as provider_units +import karrio.providers.eshipper_xml.utils as provider_utils + + +def parse_quote_reply( + _response: lib.Deserializable[lib.Element], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + response = _response.deserialize() + estimates = lib.find_element("Quote", response) + return ( + [_extract_rate(node, settings) for node in estimates], + provider_error.parse_error_response(response, settings), + ) + + +def _extract_rate( + node: lib.Element, settings: provider_utils.Settings +) -> models.RateDetails: + quote = lib.to_object(QuoteType, node) + rate_provider, service, service_name = provider_units.ShippingService.info( + quote.serviceId, quote.carrierId, quote.serviceName, quote.carrierName + ) + charges = [ + ("Base charge", quote.baseCharge), + ("Fuel surcharge", quote.fuelSurcharge), + *((surcharge.name, surcharge.amount) for surcharge in quote.Surcharge), + ] + + return models.RateDetails( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + currency=quote.currency, + service=service, + total_charge=lib.to_decimal(quote.totalCharge), + transit_days=quote.transitDays, + extra_charges=[ + models.ChargeDetails( + name=name, + currency="CAD", + amount=lib.to_decimal(amount), + ) + for name, amount in charges + if amount + ], + meta=dict(rate_provider=rate_provider, service_name=service_name), + ) + + +def quote_request( + payload: models.RateRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + packages = lib.to_packages( + payload.parcels, + package_option_type=provider_units.ShippingOption, + required=["weight", "height", "width", "length"], + ) + options = lib.to_shipping_options( + payload.options, + package_options=packages.options, + initializer=provider_units.shipping_options_initializer, + ) + packaging_type = provider_units.PackagingType[ + packages.package_type or "eshipper_boxes" + ].value + packaging = ( + "Pallet" + if packaging_type in [provider_units.PackagingType.pallet.value] + else "Package" + ) + service = ( + lib.to_services(payload.services, 
provider_units.ShippingService).first + or provider_units.ShippingService.eshipper_all + ) + + request = EShipper( + username=settings.username, + password=settings.password, + version="3.0.0", + QuoteRequest=QuoteRequestType( + saturdayPickupRequired=options.eshipper_saturday_pickup_required.state, + homelandSecurity=options.eshipper_homeland_security.state, + pierCharge=None, + exhibitionConventionSite=options.eshipper_exhibition_convention_site.state, + militaryBaseDelivery=options.eshipper_military_base_delivery.state, + customsIn_bondFreight=options.eshipper_customs_in_bond_freight.state, + limitedAccess=options.eshipper_limited_access.state, + excessLength=options.eshipper_excess_length.state, + tailgatePickup=options.eshipper_tailgate_pickup.state, + residentialPickup=options.eshipper_residential_pickup.state, + crossBorderFee=None, + notifyRecipient=options.eshipper_notify_recipient.state, + singleShipment=options.eshipper_single_shipment.state, + tailgateDelivery=options.eshipper_tailgate_delivery.state, + residentialDelivery=options.eshipper_residential_delivery.state, + insuranceType=options.insurance.state is not None, + scheduledShipDate=None, + insideDelivery=options.eshipper_inside_delivery.state, + isSaturdayService=options.eshipper_is_saturday_service.state, + dangerousGoodsType=options.eshipper_dangerous_goods_type.state, + serviceId=service.value, + stackable=options.eshipper_stackable.state, + From=FromType( + id=None, + company=shipper.company_name or " ", + instructions=None, + email=shipper.email, + attention=shipper.person_name, + phone=shipper.phone_number, + tailgateRequired=None, + residential=shipper.residential, + address1=shipper.street, + address2=lib.text(shipper.address_line2), + city=shipper.city, + state=shipper.state_code, + zip=shipper.postal_code, + country=shipper.country_code, + ), + To=ToType( + id=None, + company=recipient.company_name or " ", + notifyRecipient=None, + instructions=None, + email=recipient.email, + attention=recipient.person_name, + phone=recipient.phone_number, + tailgateRequired=None, + residential=recipient.residential, + address1=recipient.street, + address2=lib.text(recipient.address_line2), + city=recipient.city, + state=recipient.state_code, + zip=recipient.postal_code, + country=recipient.country_code, + ), + COD=None, + Packages=PackagesType( + Package=[ + PackageType( + length=provider_utils.ceil(package.length.IN), + width=provider_utils.ceil(package.width.IN), + height=provider_utils.ceil(package.height.IN), + weight=provider_utils.ceil(package.weight.LB), + type_=packaging_type, + freightClass=package.parcel.freight_class, + nmfcCode=None, + insuranceAmount=package.options.insurance.state, + codAmount=package.options.cash_on_delivery.state, + description=package.parcel.description, + ) + for package in packages + ], + type_=packaging, + ), + ), + ) + + return lib.Serializable(request, provider_utils.standard_request_serializer) diff --git a/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/shipping.py b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/shipping.py new file mode 100644 index 0000000000..0dfd110f4b --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/shipping.py @@ -0,0 +1,313 @@ +from karrio.schemas.eshipper_xml.shipping_request import ( + EShipper, + ShippingRequestType, + FromType, + ToType, + PackagesType, + PackageType, + PaymentType as RequestPaymentType, + CODType, + CODReturnAddressType, + ContactType, + ReferenceType, + CustomsInvoiceType, + 
ItemType, + BillToType, + DutiesTaxesType, +) +from karrio.schemas.eshipper_xml.shipping_reply import ( + ShippingReplyType, + QuoteType, +) + +import typing +import karrio.lib as lib +import karrio.core.models as models +import karrio.providers.eshipper_xml.error as provider_error +import karrio.providers.eshipper_xml.units as provider_units +import karrio.providers.eshipper_xml.utils as provider_utils + + +def parse_shipping_reply( + _response: lib.Deserializable[lib.Element], + settings: provider_utils.Settings, +) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + response = _response.deserialize() + shipping_node = lib.find_element("ShippingReply", response, first=True) + shipment = ( + _extract_shipment(shipping_node, settings) + if shipping_node is not None + else None + ) + + return shipment, provider_error.parse_error_response(response, settings) + + +def _extract_shipment( + node: lib.Element, settings: provider_utils.Settings +) -> models.ShipmentDetails: + shipping = lib.to_object(ShippingReplyType, node) + quote: QuoteType = shipping.Quote + + tracking_number = getattr( + next(iter(shipping.Package), None), "trackingNumber", None + ) + rate_provider, service, service_name = provider_units.ShippingService.info( + quote.serviceId, + quote.carrierId, + quote.serviceName, + quote.carrierName, + ) + invoice = dict(invoice=shipping.CustomsInvoice) if shipping.CustomsInvoice else {} + charges = [ + ("Base charge", quote.baseCharge), + ("Fuel surcharge", quote.fuelSurcharge), + *((surcharge.name, surcharge.amount) for surcharge in quote.Surcharge), + ] + + return models.ShipmentDetails( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + tracking_number=tracking_number, + shipment_identifier=shipping.Order.id, + selected_rate=( + models.RateDetails( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + service=service, + currency=quote.currency, + total_charge=lib.to_decimal(quote.totalCharge), + transit_days=quote.transitDays, + extra_charges=[ + models.ChargeDetails( + name=name, + currency="CAD", + amount=lib.to_decimal(amount), + ) + for name, amount in charges + if amount + ], + meta=dict(rate_provider=rate_provider, service_name=service_name), + ) + if quote is not None + else None + ), + docs=models.Documents(label=shipping.Labels, **invoice), + meta=dict( + rate_provider=rate_provider, + service_name=service_name, + tracking_url=shipping.TrackingURL, + ), + ) + + +def shipping_request( + payload: models.ShipmentRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + service = provider_units.ShippingService.map(payload.service).value_or_key + packages = lib.to_packages( + payload.parcels, + package_option_type=provider_units.ShippingOption, + required=["weight", "height", "width", "length"], + ) + options = lib.to_shipping_options( + payload.options, + package_options=packages.options, + initializer=provider_units.shipping_options_initializer, + ) + + payment = payload.payment or models.Payment() + is_intl = shipper.country_code != recipient.country_code + customs = lib.to_customs_info( + payload.customs, + shipper=payload.shipper, + recipient=payload.recipient, + weight_unit=packages.weight_unit, + default_to=( + models.Customs( + commodities=( + packages.items + if any(packages.items) + else [ + models.Commodity( + quantity=1, + sku=f"000{index}", + weight=pkg.weight.value, + 
weight_unit=pkg.weight_unit.value, + description=pkg.parcel.content, + ) + for index, pkg in enumerate(packages, start=1) + ] + ) + ) + if is_intl + else None + ), + ) + + packaging_type = provider_units.PackagingType[ + packages.package_type or "eshipper_boxes" + ].value + packaging = ( + "Pallet" + if packaging_type in [provider_units.PackagingType.pallet.value] + else "Package" + ) + payment_type = ( + provider_units.PaymentType[payment.paid_by].value if payload.payment else None + ) + + request = EShipper( + username=settings.username, + password=settings.password, + version="3.0.0", + ShippingRequest=ShippingRequestType( + saturdayPickupRequired=options.eshipper_saturday_pickup_required.state, + homelandSecurity=options.eshipper_homeland_security.state, + pierCharge=None, + exhibitionConventionSite=options.eshipper_exhibition_convention_site.state, + militaryBaseDelivery=options.eshipper_military_base_delivery.state, + customsIn_bondFreight=options.eshipper_customs_in_bond_freight.state, + limitedAccess=options.eshipper_limited_access.state, + excessLength=options.eshipper_excess_length.state, + tailgatePickup=options.eshipper_tailgate_pickup.state, + residentialPickup=options.eshipper_residential_pickup.state, + crossBorderFee=None, + notifyRecipient=options.eshipper_notify_recipient.state, + singleShipment=options.eshipper_single_shipment.state, + tailgateDelivery=options.eshipper_tailgate_delivery.state, + residentialDelivery=options.eshipper_residential_delivery.state, + insuranceType=options.insurance.state is not None, + scheduledShipDate=None, + insideDelivery=options.eshipper_inside_delivery.state, + isSaturdayService=options.eshipper_is_saturday_service.state, + dangerousGoodsType=options.eshipper_dangerous_goods_type.state, + serviceId=service, + stackable=options.eshipper_stackable.state, + From=FromType( + id=None, + company=shipper.company_name, + instructions=None, + email=shipper.email, + attention=shipper.person_name, + phone=shipper.phone_number, + tailgateRequired=None, + residential=shipper.residential, + address1=shipper.street, + address2=lib.text(shipper.address_line2), + city=shipper.city, + state=shipper.state_code, + zip=shipper.postal_code, + country=shipper.country_code, + ), + To=ToType( + id=None, + company=recipient.company_name, + notifyRecipient=None, + instructions=None, + email=recipient.email, + attention=recipient.person_name, + phone=recipient.phone_number, + tailgateRequired=None, + residential=recipient.residential, + address1=recipient.street, + address2=lib.text(recipient.address_line2), + city=recipient.city, + state=recipient.state_code, + zip=recipient.postal_code, + country=recipient.country_code, + ), + COD=( + CODType( + paymentType=provider_units.PaymentType.recipient.value, + CODReturnAddress=CODReturnAddressType( + codCompany=recipient.company_name, + codName=recipient.person_name, + codAddress1=recipient.street, + codCity=recipient.city, + codStateCode=recipient.state_code, + codZip=recipient.postal_code, + codCountry=recipient.country_code, + ), + ) + if options.cash_on_delivery.state is not None + else None + ), + Packages=PackagesType( + Package=[ + PackageType( + length=provider_utils.ceil(package.length.IN), + width=provider_utils.ceil(package.width.IN), + height=provider_utils.ceil(package.height.IN), + weight=provider_utils.ceil(package.weight.LB), + type_=packaging_type, + freightClass=package.parcel.freight_class, + nmfcCode=None, + insuranceAmount=package.options.insurance.state, + 
codAmount=package.options.cash_on_delivery.state, + description=package.parcel.description, + ) + for package in packages + ], + type_=packaging, + ), + Payment=(RequestPaymentType(type_=payment_type) if payment_type else None), + Reference=( + [ReferenceType(name="REF", code=payload.reference)] + if payload.reference != "" + else None + ), + CustomsInvoice=( + CustomsInvoiceType( + Currency=getattr(customs.duty, "currency", None), + brokerName=None, + contactCompany=customs.duty_billing_address.company_name, + shipperTaxID=customs.duty_billing_address.tax_id, + contactName=customs.duty_billing_address.person_name, + contactPhone=customs.duty_billing_address.phone_number, + DutiesTaxes=DutiesTaxesType( + consigneeAccount=customs.duty.account_number, + sedNumber=None, + dutiable=("No" if packages.is_document else "Yes"), + billTo=provider_units.DutyBillToType.map( + customs.duty.paid_by or "shipper" + ).value, + ), + InBondManifest=None, + BillTo=BillToType( + company=customs.duty_billing_address.company_name, + name=customs.duty_billing_address.person_name, + address1=customs.duty_billing_address.address_line, + city=customs.duty_billing_address.city, + state=customs.duty_billing_address.state_code, + zip=customs.duty_billing_address.postal_code, + country=customs.duty_billing_address.country_code, + ), + Contact=ContactType( + name=customs.duty_billing_address.person_name, + phone=customs.duty_billing_address.phone_number, + ), + Item=[ + ItemType( + code=item.hs_code or "0000", + description=lib.text( + item.description or item.title or "item" + ), + originCountry=(item.origin_country or shipper.country_code), + unitPrice=item.value_amount, + quantity=item.quantity or 1, + skuCode=item.sku, + ) + for item in customs.commodities + ], + ) + if payload.customs + else None + ), + ), + ) + + return lib.Serializable(request, provider_utils.standard_request_serializer) diff --git a/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/units.py b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/units.py new file mode 100644 index 0000000000..c6b8e86185 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/units.py @@ -0,0 +1,264 @@ +import re +import karrio.lib as lib +import karrio.core.units as units + + +class PackagingType(lib.StrEnum): + eshipper_pallet = "Pallet" + eshipper_drum = "Drum" + eshipper_boxes = "Boxes" + eshipper_rolls = "Rolls" + eshipper_pipes_tubes = "Pipes/Tubes" + eshipper_bales = "Bales" + eshipper_bags = "Bags" + eshipper_cylinder = "Cylinder" + eshipper_pails = "Pails" + eshipper_reels = "Reels" + + eshipper_envelope = "Envelope" + eshipper_courier_pak = "Courier Pak" + eshipper_package = "Package" + + """ Unified Packaging type mapping """ + envelope = eshipper_envelope + pak = eshipper_courier_pak + tube = eshipper_pipes_tubes + pallet = eshipper_pallet + small_box = eshipper_boxes + medium_box = eshipper_boxes + your_packaging = eshipper_package + + +class PaymentType(lib.StrEnum): + check = "Check" + receiver = "Receiver" + shipper = "Shipper" + third_party = "3rd Party" + + """ Unified payment type mapping """ + sender = shipper + recipient = receiver + + +class DutyBillToType(lib.StrEnum): + receiver = "Receiver" + shipper = "Shipper" + consignee_account = "Consignee Account" + + """ Unified payment type mapping """ + sender = shipper + recipient = receiver + third_party = consignee_account + + +class ShippingService(lib.StrEnum): + eshipper_all = "0" + eshipper_fedex_priority = "1" + eshipper_fedex_first_overnight 
= "2" + eshipper_fedex_ground = "3" + eshipper_fedex_standard_overnight = "28" + eshipper_fedex_2nd_day = "29" + eshipper_fedex_express_saver = "30" + eshipper_fedex_international_economy = "35" + eshipper_purolator_air = "4" + eshipper_purolator_air_9_am = "5" + eshipper_purolator_air_10_30 = "6" + eshipper_purolator_letter = "7" + eshipper_purolator_letter_9_am = "8" + eshipper_purolator_letter_10_30 = "9" + eshipper_purolator_pak = "10" + eshipper_purolator_pak_9_am = "11" + eshipper_purolator_pak_10_30 = "12" + eshipper_purolator_ground = "13" + eshipper_purolator_ground_9_am = "19" + eshipper_purolator_ground_10_30 = "20" + eshipper_canada_worldwide_same_day = "14" + eshipper_canada_worldwide_next_flight_out = "15" + eshipper_canada_worldwide_air_freight = "16" + eshipper_canada_worldwide_ltl = "17" + eshipper_dhl_express_worldwide = "101" + eshipper_dhl_express_12_pm = "103" + eshipper_dhl_express_10_30_am = "102" + eshipper_dhl_esi_export = "104" + eshipper_dhl_international_express = "106" + eshipper_ups_express_next_day_air = "600" + eshipper_ups_expedited_second_day_air = "601" + eshipper_ups_worldwide_express = "602" + eshipper_ups_worldwide_expedited = "603" + eshipper_ups_standard_ground = "604" + eshipper_ups_express_early_am_next_day_air_early_am = "605" + eshipper_ups_three_day_select = "606" + eshipper_ups_saver = "607" + eshipper_ups_ground = "608" + eshipper_ups_next_day_saver = "609" + eshipper_ups_worldwide_express_plus = "610" + eshipper_ups_second_day_air_am = "611" + eshipper_canada_post_priority = "500" + eshipper_canada_post_xpresspost = "501" + eshipper_canada_post_expedited = "502" + eshipper_canada_post_regular = "503" + eshipper_canada_post_xpresspost_usa = "504" + eshipper_canada_post_xpresspost_intl = "505" + eshipper_canada_post_air_parcel_intl = "506" + eshipper_canada_post_surface_parcel_intl = "507" + eshipper_canada_post_expedited_parcel_usa = "508" + eshipper_tst_ltl = "1100" + eshipper_ltl_chicago_suburban_express = "1500" + eshipper_ltl_fedex_freight_east = "1501" + eshipper_ltl_fedex_freight_west = "1502" + eshipper_ltl_mid_states_express = "1503" + eshipper_ltl_new_england_motor_freight = "1504" + eshipper_ltl_new_penn = "1505" + eshipper_ltl_oak_harbor = "1506" + eshipper_ltl_pitt_ohio = "1507" + eshipper_ltl_r_l_carriers = "1508" + eshipper_ltl_saia = "1509" + eshipper_ltl_usf_reddaway = "1510" + eshipper_ltl_vitran_express = "1511" + eshipper_ltl_wilson_trucking = "1512" + eshipper_ltl_yellow_transportation = "1513" + eshipper_ltl_roadway = "1514" + eshipper_ltl_fedex_national = "1515" + eshipper_wilson_trucking_tfc = "1800" + eshipper_aaa_cooper_transportation = "1801" + eshipper_roadrunner_dawes = "1802" + eshipper_new_england_motor_freight = "1803" + eshipper_new_penn_motor_express = "1804" + eshipper_dayton_freight = "1805" + eshipper_southeastern_freightway = "1806" + eshipper_saia_inc = "1807" + eshipper_conway = "1808" + eshipper_roadway = "1809" + eshipper_usf_reddaway = "1810" + eshipper_usf_holland = "1811" + eshipper_dependable_highway_express = "1812" + eshipper_day_and_ross = "1813" + eshipper_day_and_ross_r_and_l = "1814" + eshipper_ups = "1815" + eshipper_aaa_cooper = "1816" + eshipper_ama_transportation = "1817" + eshipper_averitt_express = "1818" + eshipper_central_freight = "1819" + eshipper_conway_us = "1820" + eshipper_dayton = "1821" + eshipper_drug_transport = "1822" + eshipper_estes = "1823" + eshipper_land_air_express = "1824" + eshipper_fedex_west = "1825" + eshipper_fedex_national = "1826" + eshipper_usf_holland_us = 
"1827" + eshipper_lakeville_m_express = "1828" + eshipper_milan_express = "1829" + eshipper_nebraska_transport = "1830" + eshipper_new_england = "1831" + eshipper_new_penn = "1832" + eshipper_a_duie_pyle = "1833" + eshipper_roadway_us = "1834" + eshipper_usf_reddaway_us = "1835" + eshipper_rhody_transportation = "1836" + eshipper_saia_motor_freight = "1837" + eshipper_southeastern_frgt = "1838" + eshipper_pitt_ohio = "1839" + eshipper_ward = "1840" + eshipper_wilson = "1841" + eshipper_chi_cargo = "1842" + eshipper_tax_air = "1843" + eshipper_fedex_east = "1844" + eshipper_central_transport = "1845" + eshipper_roadrunner = "1846" + eshipper_r_and_l_carriers = "1847" + eshipper_estes_us = "1848" + eshipper_yrc_roadway = "1849" + eshipper_central_transport_us = "1850" + eshipper_absolute_transportation_services = "1851" + eshipper_blue_sky_express = "1852" + eshipper_galasso_trucking = "1853" + eshipper_griley_air_freight = "1854" + eshipper_jet_transportation = "1855" + eshipper_metro_transportation_logistics = "1856" + eshipper_oak_harbor = "1857" + eshipper_stream_links_express = "1858" + eshipper_tiffany_trucking = "1859" + eshipper_ups_freight = "1860" + eshipper_roadrunner_us = "1861" + eshipper_global_mail_parcel_priority = "3500" + eshipper_global_mail_parcel_standard = "3501" + eshipper_global_mail_packet_plus_priority = "3502" + eshipper_global_mail_packet_priority = "3503" + eshipper_global_mail_packet_standard = "3504" + eshipper_global_mail_business_priority = "3505" + eshipper_global_mail_business_standard = "3506" + eshipper_global_mail_parcel_direct_priority = "3507" + eshipper_global_mail_parcel_direct_standard = "3508" + eshipper_canpar_ground = "4500" + eshipper_canpar_select_parcel = "4504" + eshipper_canpar_express_parcel = "4507" + eshipper_fleet_optics_ground = "5601" + + @classmethod + def info(cls, serviceId, carrierId, serviceName, carrierName): + carrier_name = CARRIER_IDS.get(str(carrierId)) or carrierName + service = cls.map(str(serviceId)) + formatted_name = re.sub( + r"((?<=[a-z])[A-Z]|(? units.Options: + """ + Apply default values to the given options. 
+ """ + _options = options.copy() + + if package_options is not None: + _options.update(package_options.content) + + return units.ShippingOptions(_options, ShippingOption) diff --git a/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/utils.py b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/utils.py new file mode 100644 index 0000000000..c13f692cbb --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/utils.py @@ -0,0 +1,37 @@ +import math +from typing import Optional +from karrio.core import Settings as BaseSettings +from karrio.core.utils import XP + + +class Settings(BaseSettings): + """eshipper_xml connection settings.""" + + username: str + password: str + + account_country_code: str = None + metadata: dict = {} + config: dict = {} + + @property + def server_url(self): + return ( + "http://test.eshipper.com/rpc2" + if self.test_mode + else "http://web.eshipper.com/rpc2" + ) + + @property + def carrier_name(self): + return "eshipper_xml" + + +def standard_request_serializer(request) -> str: + return XP.export(request, namespacedef_='xmlns="http://www.eshipper.net/XMLSchema"') + + +def ceil(value: Optional[float]) -> Optional[int]: + if value is None: + return None + return math.ceil(value) diff --git a/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/void_shipment.py b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/void_shipment.py new file mode 100644 index 0000000000..50a1d69eed --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/providers/eshipper_xml/void_shipment.py @@ -0,0 +1,50 @@ +from typing import List, Tuple +from karrio.schemas.eshipper_xml.shipment_cancel_request import ( + ShipmentCancelRequestType, + EShipper, + OrderType, +) +from karrio.core.models import ShipmentCancelRequest, ConfirmationDetails, Message +from karrio.core.utils import ( + Element, + Serializable, +) +from karrio.providers.eshipper_xml.error import parse_error_response +from karrio.providers.eshipper_xml.utils import Settings, standard_request_serializer +import karrio.lib as lib + + +def parse_shipment_cancel_reply( + _response: lib.Deserializable[Element], + settings: Settings, +) -> Tuple[ConfirmationDetails, List[Message]]: + response = _response.deserialize() + errors = parse_error_response(response, settings) + success = len(errors) == 0 + confirmation: ConfirmationDetails = ( + ConfirmationDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + success=success, + operation="Cancel Shipment", + ) + if success + else None + ) + + return confirmation, errors + + +def shipment_cancel_request( + payload: ShipmentCancelRequest, settings: Settings +) -> Serializable: + request = EShipper( + username=settings.username, + password=settings.password, + version="3.0.0", + ShipmentCancelRequest=ShipmentCancelRequestType( + Order=OrderType(orderId=payload.shipment_identifier) + ), + ) + + return Serializable(request, standard_request_serializer) diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/__init__.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/error.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/error.py new file mode 100644 index 0000000000..fb46eaeda6 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/error.py @@ -0,0 +1,1859 @@ +#!/usr/bin/env python +# -*- 
coding: utf-8 -*- + +# +# Generated Mon Oct 24 11:17:21 2022 by generateDS.py version 2.41.1. +# Python 3.10.6 (main, Aug 30 2022, 05:12:36) [Clang 13.1.6 (clang-1316.0.21.2.5)] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio.schemas.eshipper_xml/error.py') +# +# Command line arguments: +# ./vendor/schemas/error.xsd +# +# Command line: +# /Users/danielkobina/Workspace/project/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio.schemas.eshipper_xml/error.py" ./vendor/schemas/error.xsd +# +# Current working directory (os.getcwd()): +# eshipper_xml +# + +import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
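No such override module ships with this connector; as a purely hypothetical illustration of the first table, a `generatedsnamespaces.py` on the import path could define `GenerateDSNamespaceDefs` to pin the eShipper namespace that `standard_request_serializer` in `utils.py` otherwise passes explicitly.

```python
# File: generatedsnamespaces.py  (hypothetical, not part of this patch)
#
# Keys are generated element type names; values are the namespace definitions
# exported on their tags. The URI mirrors the namespacedef_ that
# karrio.providers.eshipper_xml.utils.standard_request_serializer passes.
GenerateDSNamespaceDefs = {
    "EShipper": 'xmlns="http://www.eshipper.net/XMLSchema"',
}
```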
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return None + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": self.__class__.__name__, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): + return input_data + + def gds_validate_string(self, input_data, node=None, input_name=""): + if not input_data: + return "" + else: + return input_data + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def 
gds_validate_base64(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of integer values") + return values + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of float values") + return values + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." 
in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: %s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, "Requires sequence of double or float values" + ) + return values + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + input_data = input_data.strip() + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) + return input_data + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) + return values + + def gds_validate_datetime(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_datetime(self, input_data, input_name=""): + if 
input_data.microsecond == 0: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split(".") + if len(time_parts) > 1: + micro_seconds = int(float("0." + time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt + + def gds_validate_date(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + except AttributeError: + pass + return _svalue + + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") + dt = dt.replace(tzinfo=tz) + return dt.date() + + def gds_validate_time(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_time(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%02d:%02d:%02d" % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%02d:%02d:%02d.%s" % ( + input_data.hour, + input_data.minute, + input_data.second, + 
("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_str_lower(self, instring): + return instring.lower() + + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = "/".join(path_list) + return path + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + + def get_path_list_(self, node, path_list): + if node is None: + return + tag = 
GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) + if classname is not None: + names = classname.split(":") + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = "utf-8" + return instring.encode(encoding) + else: + return instring + + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode("utf8") + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + + if type(self) != type(other): + return False + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + + def __ne__(self, other): + return not self.__eq__(other) + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + + def getSubclassFromModule_(module, class_): + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(" ") + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return "" + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s2 = "" + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos : mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start() : mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + return s1 + + +def quote_attrib(inStr): + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s1 = s1.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + s1 = s1.replace("\n", " ") + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find("\n") == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find("\n") == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = "" + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(":") + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == "xml": + namespace = "http://www.w3.org/XML/1998/namespace" + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get( + "{%s}%s" + % ( + namespace, + name, + ) + ) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = "%s (element %s/line %d)" % ( + msg, + node.tag, + node.sourceline, + ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + + def getCategory(self): + return self.category + + def getContenttype(self, content_type): + return self.content_type + + def getValue(self): + return self.value + + def getName(self): + return self.name + + def export(self, outfile, level, name, namespace, pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write("<%s>%g" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement(element, "%s" % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = "%g" % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = "%s" % base64.b64encode(self.value) + return text + + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(")\n") + + +class MemberSpec_(object): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + + def set_name(self, name): + self.name = name + + def 
get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return "xs:string" + else: + return self.data_type + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Data representation classes. +# + + +class Freightcom(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, version=None, ErrorReply=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.version = _cast(None, version) + self.version_nsprefix_ = None + self.ErrorReply = ErrorReply + self.ErrorReply_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, Freightcom) + if subclass is not None: + return subclass(*args_, **kwargs_) + if Freightcom.subclass: + return Freightcom.subclass(*args_, **kwargs_) + else: + return Freightcom(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_ErrorReply(self): + return self.ErrorReply + + def set_ErrorReply(self, ErrorReply): + self.ErrorReply = ErrorReply + + def get_version(self): + return self.version + + def set_version(self, version): + self.version = version + + def _hasContent(self): + if self.ErrorReply is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Freightcom", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Freightcom") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Freightcom": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Freightcom" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Freightcom", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", 
name_="Freightcom" + ): + if self.version is not None and "version" not in already_processed: + already_processed.add("version") + outfile.write( + " version=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.version), input_name="version" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Freightcom", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.ErrorReply is not None: + namespaceprefix_ = ( + self.ErrorReply_nsprefix_ + ":" + if (UseCapturedNS_ and self.ErrorReply_nsprefix_) + else "" + ) + self.ErrorReply.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ErrorReply", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("version", node) + if value is not None and "version" not in already_processed: + already_processed.add("version") + self.version = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "ErrorReply": + obj_ = ErrorReplyType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ErrorReply = obj_ + obj_.original_tagname_ = "ErrorReply" + + +# end class Freightcom + + +class ErrorReplyType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, Error=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.Error = Error + self.Error_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ErrorReplyType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ErrorReplyType.subclass: + return ErrorReplyType.subclass(*args_, **kwargs_) + else: + return ErrorReplyType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Error(self): + return self.Error + + def set_Error(self, Error): + self.Error = Error + + def _hasContent(self): + if self.Error is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ErrorReplyType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ErrorReplyType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ErrorReplyType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, 
+ namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ErrorReplyType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ErrorReplyType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ErrorReplyType", + ): + pass + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ErrorReplyType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.Error is not None: + namespaceprefix_ = ( + self.Error_nsprefix_ + ":" + if (UseCapturedNS_ and self.Error_nsprefix_) + else "" + ) + self.Error.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Error", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + pass + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Error": + obj_ = ErrorType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Error = obj_ + obj_.original_tagname_ = "Error" + + +# end class ErrorReplyType + + +class ErrorType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, Message=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.Message = _cast(None, Message) + self.Message_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ErrorType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ErrorType.subclass: + return ErrorType.subclass(*args_, **kwargs_) + else: + return ErrorType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Message(self): + return self.Message + + def set_Message(self, Message): + self.Message = Message + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ErrorType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ErrorType") + if imported_ns_def_ is not 
None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ErrorType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ErrorType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ErrorType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="ErrorType" + ): + if self.Message is not None and "Message" not in already_processed: + already_processed.add("Message") + outfile.write( + " Message=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.Message), input_name="Message" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ErrorType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("Message", node) + if value is not None and "Message" not in already_processed: + already_processed.add("Message") + self.Message = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ErrorType + + +GDSClassesMapping = {} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "Freightcom" + rootClass = Freightcom + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "Freightcom" + rootClass = Freightcom + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ """ + parser = None + rootNode = parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "Freightcom" + rootClass = Freightcom + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "Freightcom" + rootClass = Freightcom + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write("#from error import *\n\n") + sys.stdout.write("import error as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == "__main__": + # import pdb; pdb.set_trace() + main() + +RenameMappings_ = {} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {"http://www.freightcom.net/xml/XMLSchema": []} + +__all__ = ["ErrorReplyType", "ErrorType", "Freightcom"] diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_reply.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_reply.py new file mode 100644 index 0000000000..2bdd418f35 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_reply.py @@ -0,0 +1,2362 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Mon Oct 24 11:17:21 2022 by generateDS.py version 2.41.1. 
+# Python 3.10.6 (main, Aug 30 2022, 05:12:36) [Clang 13.1.6 (clang-1316.0.21.2.5)] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio.schemas.eshipper_xml/quote_reply.py') +# +# Command line arguments: +# ./vendor/schemas/quote_reply.xsd +# +# Command line: +# /Users/danielkobina/Workspace/project/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio.schemas.eshipper_xml/quote_reply.py" ./vendor/schemas/quote_reply.xsd +# +# Current working directory (os.getcwd()): +# eshipper_xml +# + +import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return None + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": self.__class__.__name__, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): + return input_data + + def gds_validate_string(self, input_data, node=None, input_name=""): + if not input_data: + return "" + else: + return input_data + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def 
gds_validate_base64(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of integer values") + return values + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of float values") + return values + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." 
in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: %s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, "Requires sequence of double or float values" + ) + return values + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + input_data = input_data.strip() + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) + return input_data + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) + return values + + def gds_validate_datetime(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_datetime(self, input_data, input_name=""): + if 
input_data.microsecond == 0: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split(".") + if len(time_parts) > 1: + micro_seconds = int(float("0." + time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt + + def gds_validate_date(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + except AttributeError: + pass + return _svalue + + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") + dt = dt.replace(tzinfo=tz) + return dt.date() + + def gds_validate_time(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_time(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%02d:%02d:%02d" % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%02d:%02d:%02d.%s" % ( + input_data.hour, + input_data.minute, + input_data.second, + 
("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_str_lower(self, instring): + return instring.lower() + + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = "/".join(path_list) + return path + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + + def get_path_list_(self, node, path_list): + if node is None: + return + tag = 
GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) + if classname is not None: + names = classname.split(":") + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = "utf-8" + return instring.encode(encoding) + else: + return instring + + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode("utf8") + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + + if type(self) != type(other): + return False + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + + def __ne__(self, other): + return not self.__eq__(other) + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + + def getSubclassFromModule_(module, class_): + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
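+# --- Editorial sketch (not produced by generateDS.py): a minimal usage example
+# for this generated module, showing how a quote reply document could be turned
+# into the EShipper / QuoteReplyType / QuoteType objects defined further below
+# and how collected warnings can be inspected. Only class, attribute and helper
+# names visible in this module are relied on; the helper itself is illustrative
+# and not part of the generated schema.
+def _parse_quote_reply_sketch(xml_string):
+    # Collect validation warnings emitted while building the object tree.
+    collector = GdsCollector_()
+    root_node = parsexmlstring_(xml_string.encode("utf-8"))
+    reply = EShipper.factory()
+    reply.build(root_node, gds_collector_=collector)
+    quotes = []
+    if reply.QuoteReply is not None:
+        for quote in reply.QuoteReply.Quote:
+            quotes.append(
+                dict(
+                    carrier=quote.carrierName,
+                    service=quote.serviceName,
+                    total=quote.totalCharge,
+                    currency=quote.currency,
+                )
+            )
+    return quotes, collector.get_messages()
+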
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(" ") + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return "" + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s2 = "" + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos : mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start() : mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + return s1 + + +def quote_attrib(inStr): + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s1 = s1.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + s1 = s1.replace("\n", " ") + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find("\n") == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find("\n") == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = "" + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(":") + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == "xml": + namespace = "http://www.w3.org/XML/1998/namespace" + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get( + "{%s}%s" + % ( + namespace, + name, + ) + ) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = "%s (element %s/line %d)" % ( + msg, + node.tag, + node.sourceline, + ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + + def getCategory(self): + return self.category + + def getContenttype(self, content_type): + return self.content_type + + def getValue(self): + return self.value + + def getName(self): + return self.name + + def export(self, outfile, level, name, namespace, pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write("<%s>%g" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement(element, "%s" % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = "%g" % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = "%s" % base64.b64encode(self.value) + return text + + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(")\n") + + +class MemberSpec_(object): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + + def set_name(self, name): + self.name = name + + def 
get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return "xs:string" + else: + return self.data_type + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Data representation classes. +# + + +class EShipper(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, version=None, QuoteReply=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.version = _cast(None, version) + self.version_nsprefix_ = None + self.QuoteReply = QuoteReply + self.QuoteReply_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, EShipper) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EShipper.subclass: + return EShipper.subclass(*args_, **kwargs_) + else: + return EShipper(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_QuoteReply(self): + return self.QuoteReply + + def set_QuoteReply(self, QuoteReply): + self.QuoteReply = QuoteReply + + def get_version(self): + return self.version + + def set_version(self, version): + self.version = version + + def _hasContent(self): + if self.QuoteReply is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EShipper") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EShipper": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="EShipper" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EShipper", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="EShipper" + ): 
+ if self.version is not None and "version" not in already_processed: + already_processed.add("version") + outfile.write( + " version=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.version), input_name="version" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.QuoteReply is not None: + namespaceprefix_ = ( + self.QuoteReply_nsprefix_ + ":" + if (UseCapturedNS_ and self.QuoteReply_nsprefix_) + else "" + ) + self.QuoteReply.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="QuoteReply", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("version", node) + if value is not None and "version" not in already_processed: + already_processed.add("version") + self.version = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "QuoteReply": + obj_ = QuoteReplyType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.QuoteReply = obj_ + obj_.original_tagname_ = "QuoteReply" + + +# end class EShipper + + +class QuoteReplyType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, Quote=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + if Quote is None: + self.Quote = [] + else: + self.Quote = Quote + self.Quote_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, QuoteReplyType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if QuoteReplyType.subclass: + return QuoteReplyType.subclass(*args_, **kwargs_) + else: + return QuoteReplyType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Quote(self): + return self.Quote + + def set_Quote(self, Quote): + self.Quote = Quote + + def add_Quote(self, value): + self.Quote.append(value) + + def insert_Quote_at(self, index, value): + self.Quote.insert(index, value) + + def replace_Quote_at(self, index, value): + self.Quote[index] = value + + def _hasContent(self): + if self.Quote: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteReplyType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("QuoteReplyType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "QuoteReplyType": + 
name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="QuoteReplyType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="QuoteReplyType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="QuoteReplyType", + ): + pass + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteReplyType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + for Quote_ in self.Quote: + namespaceprefix_ = ( + self.Quote_nsprefix_ + ":" + if (UseCapturedNS_ and self.Quote_nsprefix_) + else "" + ) + Quote_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Quote", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + pass + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Quote": + obj_ = QuoteType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Quote.append(obj_) + obj_.original_tagname_ = "Quote" + + +# end class QuoteReplyType + + +class QuoteType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + carrierId=None, + carrierName=None, + serviceId=None, + serviceName=None, + modeTransport=None, + transitDays=None, + baseCharge=None, + fuelSurcharge=None, + totalCharge=None, + currency=None, + Surcharge=None, + valueOf_=None, + mixedclass_=None, + content_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.carrierId = _cast(int, carrierId) + self.carrierId_nsprefix_ = None + self.carrierName = _cast(None, carrierName) + self.carrierName_nsprefix_ = None + self.serviceId = _cast(int, serviceId) + self.serviceId_nsprefix_ = None + self.serviceName = _cast(None, serviceName) + self.serviceName_nsprefix_ = None + self.modeTransport = _cast(None, modeTransport) + self.modeTransport_nsprefix_ = None + self.transitDays = _cast(int, transitDays) + self.transitDays_nsprefix_ = None + self.baseCharge = _cast(float, baseCharge) + self.baseCharge_nsprefix_ = None + self.fuelSurcharge = _cast(float, fuelSurcharge) + self.fuelSurcharge_nsprefix_ = None + self.totalCharge = _cast(float, totalCharge) + 
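+        # Monetary fields (baseCharge, fuelSurcharge, totalCharge) are coerced to
+        # float; currency below is kept as the raw string attribute value.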
self.totalCharge_nsprefix_ = None + self.currency = _cast(None, currency) + self.currency_nsprefix_ = None + if Surcharge is None: + self.Surcharge = [] + else: + self.Surcharge = Surcharge + self.Surcharge_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, QuoteType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if QuoteType.subclass: + return QuoteType.subclass(*args_, **kwargs_) + else: + return QuoteType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Surcharge(self): + return self.Surcharge + + def set_Surcharge(self, Surcharge): + self.Surcharge = Surcharge + + def add_Surcharge(self, value): + self.Surcharge.append(value) + + def insert_Surcharge_at(self, index, value): + self.Surcharge.insert(index, value) + + def replace_Surcharge_at(self, index, value): + self.Surcharge[index] = value + + def get_carrierId(self): + return self.carrierId + + def set_carrierId(self, carrierId): + self.carrierId = carrierId + + def get_carrierName(self): + return self.carrierName + + def set_carrierName(self, carrierName): + self.carrierName = carrierName + + def get_serviceId(self): + return self.serviceId + + def set_serviceId(self, serviceId): + self.serviceId = serviceId + + def get_serviceName(self): + return self.serviceName + + def set_serviceName(self, serviceName): + self.serviceName = serviceName + + def get_modeTransport(self): + return self.modeTransport + + def set_modeTransport(self, modeTransport): + self.modeTransport = modeTransport + + def get_transitDays(self): + return self.transitDays + + def set_transitDays(self, transitDays): + self.transitDays = transitDays + + def get_baseCharge(self): + return self.baseCharge + + def set_baseCharge(self, baseCharge): + self.baseCharge = baseCharge + + def get_fuelSurcharge(self): + return self.fuelSurcharge + + def set_fuelSurcharge(self, fuelSurcharge): + self.fuelSurcharge = fuelSurcharge + + def get_totalCharge(self): + return self.totalCharge + + def set_totalCharge(self, totalCharge): + self.totalCharge = totalCharge + + def get_currency(self): + return self.currency + + def set_currency(self, currency): + self.currency = currency + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if ( + self.Surcharge + or (1 if type(self.valueOf_) in [int, float] else self.valueOf_) + or self.content_ + ): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("QuoteType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "QuoteType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " 
+ namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="QuoteType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="QuoteType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="QuoteType" + ): + if self.carrierId is not None and "carrierId" not in already_processed: + already_processed.add("carrierId") + outfile.write( + ' carrierId="%s"' + % self.gds_format_integer(self.carrierId, input_name="carrierId") + ) + if self.carrierName is not None and "carrierName" not in already_processed: + already_processed.add("carrierName") + outfile.write( + " carrierName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.carrierName), input_name="carrierName" + ) + ), + ) + ) + if self.serviceId is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + outfile.write( + ' serviceId="%s"' + % self.gds_format_integer(self.serviceId, input_name="serviceId") + ) + if self.serviceName is not None and "serviceName" not in already_processed: + already_processed.add("serviceName") + outfile.write( + " serviceName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.serviceName), input_name="serviceName" + ) + ), + ) + ) + if self.modeTransport is not None and "modeTransport" not in already_processed: + already_processed.add("modeTransport") + outfile.write( + " modeTransport=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.modeTransport), input_name="modeTransport" + ) + ), + ) + ) + if self.transitDays is not None and "transitDays" not in already_processed: + already_processed.add("transitDays") + outfile.write( + ' transitDays="%s"' + % self.gds_format_integer(self.transitDays, input_name="transitDays") + ) + if self.baseCharge is not None and "baseCharge" not in already_processed: + already_processed.add("baseCharge") + outfile.write( + ' baseCharge="%s"' + % self.gds_format_float(self.baseCharge, input_name="baseCharge") + ) + if self.fuelSurcharge is not None and "fuelSurcharge" not in already_processed: + already_processed.add("fuelSurcharge") + outfile.write( + ' fuelSurcharge="%s"' + % self.gds_format_float(self.fuelSurcharge, input_name="fuelSurcharge") + ) + if self.totalCharge is not None and "totalCharge" not in already_processed: + already_processed.add("totalCharge") + outfile.write( + ' totalCharge="%s"' + % self.gds_format_float(self.totalCharge, input_name="totalCharge") + ) + if self.currency is not None and "currency" not in already_processed: + already_processed.add("currency") + outfile.write( + " currency=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.currency), input_name="currency" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteType", + fromsubclass_=False, + pretty_print=True, + ): + if not fromsubclass_: + for item_ in self.content_: + item_.export( + outfile, + level, + item_.name, + namespaceprefix_, + pretty_print=pretty_print, + ) + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + for Surcharge_ in self.Surcharge: + namespaceprefix_ = ( + 
self.Surcharge_nsprefix_ + ":" + if (UseCapturedNS_ and self.Surcharge_nsprefix_) + else "" + ) + Surcharge_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Surcharge", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_( + MixedContainer.CategoryText, MixedContainer.TypeNone, "", node.text + ) + self.content_.append(obj_) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("carrierId", node) + if value is not None and "carrierId" not in already_processed: + already_processed.add("carrierId") + self.carrierId = self.gds_parse_integer(value, node, "carrierId") + value = find_attr_value_("carrierName", node) + if value is not None and "carrierName" not in already_processed: + already_processed.add("carrierName") + self.carrierName = value + value = find_attr_value_("serviceId", node) + if value is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + self.serviceId = self.gds_parse_integer(value, node, "serviceId") + value = find_attr_value_("serviceName", node) + if value is not None and "serviceName" not in already_processed: + already_processed.add("serviceName") + self.serviceName = value + value = find_attr_value_("modeTransport", node) + if value is not None and "modeTransport" not in already_processed: + already_processed.add("modeTransport") + self.modeTransport = value + value = find_attr_value_("transitDays", node) + if value is not None and "transitDays" not in already_processed: + already_processed.add("transitDays") + self.transitDays = self.gds_parse_integer(value, node, "transitDays") + value = find_attr_value_("baseCharge", node) + if value is not None and "baseCharge" not in already_processed: + already_processed.add("baseCharge") + value = self.gds_parse_float(value, node, "baseCharge") + self.baseCharge = value + value = find_attr_value_("fuelSurcharge", node) + if value is not None and "fuelSurcharge" not in already_processed: + already_processed.add("fuelSurcharge") + value = self.gds_parse_float(value, node, "fuelSurcharge") + self.fuelSurcharge = value + value = find_attr_value_("totalCharge", node) + if value is not None and "totalCharge" not in already_processed: + already_processed.add("totalCharge") + value = self.gds_parse_float(value, node, "totalCharge") + self.totalCharge = value + value = find_attr_value_("currency", node) + if value is not None and "currency" not in already_processed: + already_processed.add("currency") + self.currency = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Surcharge": + obj_ = SurchargeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_( + MixedContainer.CategoryComplex, + MixedContainer.TypeNone, + "Surcharge", + obj_, + ) + self.content_.append(obj_) + if hasattr(self, "add_Surcharge"): + self.add_Surcharge(obj_.value) + elif hasattr(self, "set_Surcharge"): + 
self.set_Surcharge(obj_.value) + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_( + MixedContainer.CategoryText, MixedContainer.TypeNone, "", child_.tail + ) + self.content_.append(obj_) + + +# end class QuoteType + + +class SurchargeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + id=None, + name=None, + amount=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.name = _cast(None, name) + self.name_nsprefix_ = None + self.amount = _cast(float, amount) + self.amount_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, SurchargeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SurchargeType.subclass: + return SurchargeType.subclass(*args_, **kwargs_) + else: + return SurchargeType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_name(self): + return self.name + + def set_name(self, name): + self.name = name + + def get_amount(self): + return self.amount + + def set_amount(self, amount): + self.amount = amount + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SurchargeType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SurchargeType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SurchargeType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SurchargeType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SurchargeType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SurchargeType", + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + 
self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.amount is not None and "amount" not in already_processed: + already_processed.add("amount") + outfile.write( + ' amount="%s"' % self.gds_format_float(self.amount, input_name="amount") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SurchargeType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") + self.name = value + value = find_attr_value_("amount", node) + if value is not None and "amount" not in already_processed: + already_processed.add("amount") + value = self.gds_parse_float(value, node, "amount") + self.amount = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class SurchargeType + + +GDSClassesMapping = {} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
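+    For example (illustrative, assuming the parsed document binds a single
+    "ns1" prefix), the return value would be:
+        ({"ns1": "http://www.eshipper.net/XMLSchema"},
+         'xmlns:ns1="http://www.eshipper.net/XMLSchema"')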
+ """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ """ + parser = None + rootNode = parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write("#from quote_reply import *\n\n") + sys.stdout.write("import quote_reply as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == "__main__": + # import pdb; pdb.set_trace() + main() + +RenameMappings_ = {} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {"http://www.eshipper.net/XMLSchema": []} + +__all__ = ["EShipper", "QuoteReplyType", "QuoteType", "SurchargeType"] diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_request.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_request.py new file mode 100644 index 0000000000..09c050dad7 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/quote_request.py @@ -0,0 +1,4906 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Mon Oct 24 11:17:20 2022 by generateDS.py version 2.41.1. 
+# Python 3.10.6 (main, Aug 30 2022, 05:12:36) [Clang 13.1.6 (clang-1316.0.21.2.5)] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio.schemas.eshipper_xml/quote_request.py') +# +# Command line arguments: +# ./vendor/schemas/quote_request.xsd +# +# Command line: +# /Users/danielkobina/Workspace/project/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio.schemas.eshipper_xml/quote_request.py" ./vendor/schemas/quote_request.xsd +# +# Current working directory (os.getcwd()): +# eshipper_xml +# + +import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return None + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": self.__class__.__name__, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): + return input_data + + def gds_validate_string(self, input_data, node=None, input_name=""): + if not input_data: + return "" + else: + return input_data + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def 
gds_validate_base64(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of integer values") + return values + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of float values") + return values + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." 
in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: %s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, "Requires sequence of double or float values" + ) + return values + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + input_data = input_data.strip() + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) + return input_data + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) + return values + + def gds_validate_datetime(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_datetime(self, input_data, input_name=""): + if 
input_data.microsecond == 0: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split(".") + if len(time_parts) > 1: + micro_seconds = int(float("0." + time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt + + def gds_validate_date(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + except AttributeError: + pass + return _svalue + + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") + dt = dt.replace(tzinfo=tz) + return dt.date() + + def gds_validate_time(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_time(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%02d:%02d:%02d" % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%02d:%02d:%02d.%s" % ( + input_data.hour, + input_data.minute, + input_data.second, + 
("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_str_lower(self, instring): + return instring.lower() + + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = "/".join(path_list) + return path + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + + def get_path_list_(self, node, path_list): + if node is None: + return + tag = 
GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) + if classname is not None: + names = classname.split(":") + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = "utf-8" + return instring.encode(encoding) + else: + return instring + + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode("utf8") + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + + if type(self) != type(other): + return False + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + + def __ne__(self, other): + return not self.__eq__(other) + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + + def getSubclassFromModule_(module, class_): + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
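+# For example (illustrative): quote_xml("Beds & Sofas") returns
+# "Beds &amp; Sofas", while quote_attrib('7" box') wraps the value in single
+# quotes so it can be emitted safely as an XML attribute.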
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write("    ")
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ""
+    s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr
+    s2 = ""
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos : mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start() : mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace("&", "&amp;")
+    s1 = s1.replace("<", "&lt;")
+    s1 = s1.replace(">", "&gt;")
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr
+    s1 = s1.replace("&", "&amp;")
+    s1 = s1.replace("<", "&lt;")
+    s1 = s1.replace(">", "&gt;")
+    s1 = s1.replace("\n", "&#10;")
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find("\n") == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find("\n") == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ""
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(":")
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == "xml":
+            namespace = "http://www.w3.org/XML/1998/namespace"
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get(
+                "{%s}%s"
+                % (
+                    namespace,
+                    name,
+                )
+            )
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = "%s (element %s/line %d)" % (
+            msg,
+            node.tag,
+            node.sourceline,
+        )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+
+    def getCategory(self):
+        return self.category
+
+    def getContenttype(self, content_type):
+        return self.content_type
+
+    def getValue(self):
+        return self.value
+
+    def getName(self):
+        return self.name
+
+    def export(self, outfile, level, name, namespace, pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name, pretty_print=pretty_print
+            )
+
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write("<%s>%s</%s>" % (self.name, self.value, self.name))
+        elif (
+            self.content_type == MixedContainer.TypeInteger
+            or self.content_type == MixedContainer.TypeBoolean
+        ):
+            outfile.write("<%s>%d</%s>" % (self.name, self.value, self.name))
+        elif (
+            self.content_type == MixedContainer.TypeFloat
+            or self.content_type == MixedContainer.TypeDecimal
+        ):
+            outfile.write("<%s>%f</%s>" % (self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write("<%s>%g</%s>" % (self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write(
+                "<%s>%s</%s>" % (self.name, base64.b64encode(self.value), self.name)
+            )
+
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(element, "%s" % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (
+            self.content_type == MixedContainer.TypeInteger
+            or self.content_type == MixedContainer.TypeBoolean
+        ):
+            text = "%d" % self.value
+        elif (
+            self.content_type == MixedContainer.TypeFloat
+            or self.content_type == MixedContainer.TypeDecimal
+        ):
+            text = "%f" % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = "%g" % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = "%s" % base64.b64encode(self.value)
+        return text
+
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n'
+                % (self.category, self.content_type, self.name, self.value)
+            )
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n'
+                % (self.category, self.content_type, self.name, self.value)
+            )
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n'
+                % (
+                    self.category,
+                    self.content_type,
+                    self.name,
+                )
+            )
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(")\n")
+
+
+class MemberSpec_(object):
+    def __init__(
+        self,
+        name="",
+        data_type="",
+        container=0,
+        optional=0,
+        child_attrs=None,
+        choice=None,
+    ):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+
+    def set_name(self, name):
+        self.name = name
+
+    def 
get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return "xs:string" + else: + return self.data_type + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Data representation classes. +# + + +class EShipper(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + username=None, + password=None, + version=None, + QuoteRequest=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.username = _cast(None, username) + self.username_nsprefix_ = None + self.password = _cast(None, password) + self.password_nsprefix_ = None + self.version = _cast(None, version) + self.version_nsprefix_ = None + self.QuoteRequest = QuoteRequest + self.QuoteRequest_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, EShipper) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EShipper.subclass: + return EShipper.subclass(*args_, **kwargs_) + else: + return EShipper(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_QuoteRequest(self): + return self.QuoteRequest + + def set_QuoteRequest(self, QuoteRequest): + self.QuoteRequest = QuoteRequest + + def get_username(self): + return self.username + + def set_username(self, username): + self.username = username + + def get_password(self): + return self.password + + def set_password(self, password): + self.password = password + + def get_version(self): + return self.version + + def set_version(self, version): + self.version = version + + def _hasContent(self): + if self.QuoteRequest is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EShipper") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EShipper": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="EShipper" + ) + 
if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EShipper", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="EShipper" + ): + if self.username is not None and "username" not in already_processed: + already_processed.add("username") + outfile.write( + " username=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.username), input_name="username" + ) + ), + ) + ) + if self.password is not None and "password" not in already_processed: + already_processed.add("password") + outfile.write( + " password=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.password), input_name="password" + ) + ), + ) + ) + if self.version is not None and "version" not in already_processed: + already_processed.add("version") + outfile.write( + " version=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.version), input_name="version" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.QuoteRequest is not None: + namespaceprefix_ = ( + self.QuoteRequest_nsprefix_ + ":" + if (UseCapturedNS_ and self.QuoteRequest_nsprefix_) + else "" + ) + self.QuoteRequest.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="QuoteRequest", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("username", node) + if value is not None and "username" not in already_processed: + already_processed.add("username") + self.username = value + value = find_attr_value_("password", node) + if value is not None and "password" not in already_processed: + already_processed.add("password") + self.password = value + value = find_attr_value_("version", node) + if value is not None and "version" not in already_processed: + already_processed.add("version") + self.version = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "QuoteRequest": + obj_ = QuoteRequestType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.QuoteRequest = obj_ + obj_.original_tagname_ = "QuoteRequest" + + +# end class EShipper + + +class QuoteRequestType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + serviceId=None, + SortandSegregateCharge=None, + homelandSecurity=None, + pierCharge=None, + exhibitionConventionSite=None, + militaryBaseDelivery=None, + customsIn_bondFreight=None, + limitedAccess=None, + excessLength=None, + crossBorderFee=None, + singleShipment=None, + 
saturdayPickupRequired=None, + insidePickup=None, + insuranceType=None, + scheduledShipDate=None, + insideDelivery=None, + deliveryAppointment=None, + specialEquipment=None, + holdForPickupRequired=None, + signatureRequired=None, + isSaturdayService=None, + dangerousGoodsType=None, + stackable=None, + From=None, + To=None, + COD=None, + Packages=None, + Pickup=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.serviceId = _cast(int, serviceId) + self.serviceId_nsprefix_ = None + self.SortandSegregateCharge = _cast(None, SortandSegregateCharge) + self.SortandSegregateCharge_nsprefix_ = None + self.homelandSecurity = _cast(None, homelandSecurity) + self.homelandSecurity_nsprefix_ = None + self.pierCharge = _cast(None, pierCharge) + self.pierCharge_nsprefix_ = None + self.exhibitionConventionSite = _cast(None, exhibitionConventionSite) + self.exhibitionConventionSite_nsprefix_ = None + self.militaryBaseDelivery = _cast(None, militaryBaseDelivery) + self.militaryBaseDelivery_nsprefix_ = None + self.customsIn_bondFreight = _cast(None, customsIn_bondFreight) + self.customsIn_bondFreight_nsprefix_ = None + self.limitedAccess = _cast(None, limitedAccess) + self.limitedAccess_nsprefix_ = None + self.excessLength = _cast(None, excessLength) + self.excessLength_nsprefix_ = None + self.crossBorderFee = _cast(None, crossBorderFee) + self.crossBorderFee_nsprefix_ = None + self.singleShipment = _cast(None, singleShipment) + self.singleShipment_nsprefix_ = None + self.saturdayPickupRequired = _cast(None, saturdayPickupRequired) + self.saturdayPickupRequired_nsprefix_ = None + self.insidePickup = _cast(None, insidePickup) + self.insidePickup_nsprefix_ = None + self.insuranceType = _cast(None, insuranceType) + self.insuranceType_nsprefix_ = None + self.scheduledShipDate = _cast(None, scheduledShipDate) + self.scheduledShipDate_nsprefix_ = None + self.insideDelivery = _cast(None, insideDelivery) + self.insideDelivery_nsprefix_ = None + self.deliveryAppointment = _cast(None, deliveryAppointment) + self.deliveryAppointment_nsprefix_ = None + self.specialEquipment = _cast(None, specialEquipment) + self.specialEquipment_nsprefix_ = None + self.holdForPickupRequired = _cast(None, holdForPickupRequired) + self.holdForPickupRequired_nsprefix_ = None + self.signatureRequired = _cast(None, signatureRequired) + self.signatureRequired_nsprefix_ = None + self.isSaturdayService = _cast(None, isSaturdayService) + self.isSaturdayService_nsprefix_ = None + self.dangerousGoodsType = _cast(None, dangerousGoodsType) + self.dangerousGoodsType_nsprefix_ = None + self.stackable = _cast(None, stackable) + self.stackable_nsprefix_ = None + self.From = From + self.From_nsprefix_ = None + self.To = To + self.To_nsprefix_ = None + self.COD = COD + self.COD_nsprefix_ = None + self.Packages = Packages + self.Packages_nsprefix_ = None + self.Pickup = Pickup + self.Pickup_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, QuoteRequestType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if QuoteRequestType.subclass: + return QuoteRequestType.subclass(*args_, **kwargs_) + else: + return QuoteRequestType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def 
set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_From(self): + return self.From + + def set_From(self, From): + self.From = From + + def get_To(self): + return self.To + + def set_To(self, To): + self.To = To + + def get_COD(self): + return self.COD + + def set_COD(self, COD): + self.COD = COD + + def get_Packages(self): + return self.Packages + + def set_Packages(self, Packages): + self.Packages = Packages + + def get_Pickup(self): + return self.Pickup + + def set_Pickup(self, Pickup): + self.Pickup = Pickup + + def get_serviceId(self): + return self.serviceId + + def set_serviceId(self, serviceId): + self.serviceId = serviceId + + def get_SortandSegregateCharge(self): + return self.SortandSegregateCharge + + def set_SortandSegregateCharge(self, SortandSegregateCharge): + self.SortandSegregateCharge = SortandSegregateCharge + + def get_homelandSecurity(self): + return self.homelandSecurity + + def set_homelandSecurity(self, homelandSecurity): + self.homelandSecurity = homelandSecurity + + def get_pierCharge(self): + return self.pierCharge + + def set_pierCharge(self, pierCharge): + self.pierCharge = pierCharge + + def get_exhibitionConventionSite(self): + return self.exhibitionConventionSite + + def set_exhibitionConventionSite(self, exhibitionConventionSite): + self.exhibitionConventionSite = exhibitionConventionSite + + def get_militaryBaseDelivery(self): + return self.militaryBaseDelivery + + def set_militaryBaseDelivery(self, militaryBaseDelivery): + self.militaryBaseDelivery = militaryBaseDelivery + + def get_customsIn_bondFreight(self): + return self.customsIn_bondFreight + + def set_customsIn_bondFreight(self, customsIn_bondFreight): + self.customsIn_bondFreight = customsIn_bondFreight + + def get_limitedAccess(self): + return self.limitedAccess + + def set_limitedAccess(self, limitedAccess): + self.limitedAccess = limitedAccess + + def get_excessLength(self): + return self.excessLength + + def set_excessLength(self, excessLength): + self.excessLength = excessLength + + def get_crossBorderFee(self): + return self.crossBorderFee + + def set_crossBorderFee(self, crossBorderFee): + self.crossBorderFee = crossBorderFee + + def get_singleShipment(self): + return self.singleShipment + + def set_singleShipment(self, singleShipment): + self.singleShipment = singleShipment + + def get_saturdayPickupRequired(self): + return self.saturdayPickupRequired + + def set_saturdayPickupRequired(self, saturdayPickupRequired): + self.saturdayPickupRequired = saturdayPickupRequired + + def get_insidePickup(self): + return self.insidePickup + + def set_insidePickup(self, insidePickup): + self.insidePickup = insidePickup + + def get_insuranceType(self): + return self.insuranceType + + def set_insuranceType(self, insuranceType): + self.insuranceType = insuranceType + + def get_scheduledShipDate(self): + return self.scheduledShipDate + + def set_scheduledShipDate(self, scheduledShipDate): + self.scheduledShipDate = scheduledShipDate + + def get_insideDelivery(self): + return self.insideDelivery + + def set_insideDelivery(self, insideDelivery): + self.insideDelivery = insideDelivery + + def get_deliveryAppointment(self): + return self.deliveryAppointment + + def set_deliveryAppointment(self, deliveryAppointment): + self.deliveryAppointment = deliveryAppointment + + def get_specialEquipment(self): + return self.specialEquipment + + def set_specialEquipment(self, specialEquipment): + self.specialEquipment = specialEquipment + + def get_holdForPickupRequired(self): + return 
self.holdForPickupRequired + + def set_holdForPickupRequired(self, holdForPickupRequired): + self.holdForPickupRequired = holdForPickupRequired + + def get_signatureRequired(self): + return self.signatureRequired + + def set_signatureRequired(self, signatureRequired): + self.signatureRequired = signatureRequired + + def get_isSaturdayService(self): + return self.isSaturdayService + + def set_isSaturdayService(self, isSaturdayService): + self.isSaturdayService = isSaturdayService + + def get_dangerousGoodsType(self): + return self.dangerousGoodsType + + def set_dangerousGoodsType(self, dangerousGoodsType): + self.dangerousGoodsType = dangerousGoodsType + + def get_stackable(self): + return self.stackable + + def set_stackable(self, stackable): + self.stackable = stackable + + def _hasContent(self): + if ( + self.From is not None + or self.To is not None + or self.COD is not None + or self.Packages is not None + or self.Pickup is not None + ): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteRequestType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("QuoteRequestType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "QuoteRequestType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="QuoteRequestType", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="QuoteRequestType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="QuoteRequestType", + ): + if self.serviceId is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + outfile.write( + ' serviceId="%s"' + % self.gds_format_integer(self.serviceId, input_name="serviceId") + ) + if ( + self.SortandSegregateCharge is not None + and "SortandSegregateCharge" not in already_processed + ): + already_processed.add("SortandSegregateCharge") + outfile.write( + " SortandSegregateCharge=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.SortandSegregateCharge), + input_name="SortandSegregateCharge", + ) + ), + ) + ) + if ( + self.homelandSecurity is not None + and "homelandSecurity" not in already_processed + ): + already_processed.add("homelandSecurity") + outfile.write( + " homelandSecurity=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.homelandSecurity), + input_name="homelandSecurity", + ) + ), + ) + ) + if self.pierCharge is not None and "pierCharge" not in already_processed: + already_processed.add("pierCharge") + outfile.write( + " pierCharge=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pierCharge), input_name="pierCharge" + ) + ), + ) + ) + if ( + self.exhibitionConventionSite is not None + and 
"exhibitionConventionSite" not in already_processed + ): + already_processed.add("exhibitionConventionSite") + outfile.write( + " exhibitionConventionSite=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.exhibitionConventionSite), + input_name="exhibitionConventionSite", + ) + ), + ) + ) + if ( + self.militaryBaseDelivery is not None + and "militaryBaseDelivery" not in already_processed + ): + already_processed.add("militaryBaseDelivery") + outfile.write( + " militaryBaseDelivery=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.militaryBaseDelivery), + input_name="militaryBaseDelivery", + ) + ), + ) + ) + if ( + self.customsIn_bondFreight is not None + and "customsIn_bondFreight" not in already_processed + ): + already_processed.add("customsIn_bondFreight") + outfile.write( + " customsIn-bondFreight=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.customsIn_bondFreight), + input_name="customsIn-bondFreight", + ) + ), + ) + ) + if self.limitedAccess is not None and "limitedAccess" not in already_processed: + already_processed.add("limitedAccess") + outfile.write( + " limitedAccess=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.limitedAccess), input_name="limitedAccess" + ) + ), + ) + ) + if self.excessLength is not None and "excessLength" not in already_processed: + already_processed.add("excessLength") + outfile.write( + " excessLength=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.excessLength), input_name="excessLength" + ) + ), + ) + ) + if ( + self.crossBorderFee is not None + and "crossBorderFee" not in already_processed + ): + already_processed.add("crossBorderFee") + outfile.write( + " crossBorderFee=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.crossBorderFee), + input_name="crossBorderFee", + ) + ), + ) + ) + if ( + self.singleShipment is not None + and "singleShipment" not in already_processed + ): + already_processed.add("singleShipment") + outfile.write( + " singleShipment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.singleShipment), + input_name="singleShipment", + ) + ), + ) + ) + if ( + self.saturdayPickupRequired is not None + and "saturdayPickupRequired" not in already_processed + ): + already_processed.add("saturdayPickupRequired") + outfile.write( + " saturdayPickupRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.saturdayPickupRequired), + input_name="saturdayPickupRequired", + ) + ), + ) + ) + if self.insidePickup is not None and "insidePickup" not in already_processed: + already_processed.add("insidePickup") + outfile.write( + " insidePickup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.insidePickup), input_name="insidePickup" + ) + ), + ) + ) + if self.insuranceType is not None and "insuranceType" not in already_processed: + already_processed.add("insuranceType") + outfile.write( + " insuranceType=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.insuranceType), input_name="insuranceType" + ) + ), + ) + ) + if ( + self.scheduledShipDate is not None + and "scheduledShipDate" not in already_processed + ): + already_processed.add("scheduledShipDate") + outfile.write( + " scheduledShipDate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.scheduledShipDate), + input_name="scheduledShipDate", + ) + ), + ) + ) + if ( + self.insideDelivery is not None + and "insideDelivery" 
not in already_processed + ): + already_processed.add("insideDelivery") + outfile.write( + " insideDelivery=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.insideDelivery), + input_name="insideDelivery", + ) + ), + ) + ) + if ( + self.deliveryAppointment is not None + and "deliveryAppointment" not in already_processed + ): + already_processed.add("deliveryAppointment") + outfile.write( + " deliveryAppointment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.deliveryAppointment), + input_name="deliveryAppointment", + ) + ), + ) + ) + if ( + self.specialEquipment is not None + and "specialEquipment" not in already_processed + ): + already_processed.add("specialEquipment") + outfile.write( + " specialEquipment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.specialEquipment), + input_name="specialEquipment", + ) + ), + ) + ) + if ( + self.holdForPickupRequired is not None + and "holdForPickupRequired" not in already_processed + ): + already_processed.add("holdForPickupRequired") + outfile.write( + " holdForPickupRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.holdForPickupRequired), + input_name="holdForPickupRequired", + ) + ), + ) + ) + if ( + self.signatureRequired is not None + and "signatureRequired" not in already_processed + ): + already_processed.add("signatureRequired") + outfile.write( + " signatureRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.signatureRequired), + input_name="signatureRequired", + ) + ), + ) + ) + if ( + self.isSaturdayService is not None + and "isSaturdayService" not in already_processed + ): + already_processed.add("isSaturdayService") + outfile.write( + " isSaturdayService=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.isSaturdayService), + input_name="isSaturdayService", + ) + ), + ) + ) + if ( + self.dangerousGoodsType is not None + and "dangerousGoodsType" not in already_processed + ): + already_processed.add("dangerousGoodsType") + outfile.write( + " dangerousGoodsType=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dangerousGoodsType), + input_name="dangerousGoodsType", + ) + ), + ) + ) + if self.stackable is not None and "stackable" not in already_processed: + already_processed.add("stackable") + outfile.write( + " stackable=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.stackable), input_name="stackable" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteRequestType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.From is not None: + namespaceprefix_ = ( + self.From_nsprefix_ + ":" + if (UseCapturedNS_ and self.From_nsprefix_) + else "" + ) + self.From.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="From", + pretty_print=pretty_print, + ) + if self.To is not None: + namespaceprefix_ = ( + self.To_nsprefix_ + ":" + if (UseCapturedNS_ and self.To_nsprefix_) + else "" + ) + self.To.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="To", + pretty_print=pretty_print, + ) + if self.COD is not None: + namespaceprefix_ = ( + self.COD_nsprefix_ + ":" + if (UseCapturedNS_ and self.COD_nsprefix_) + else "" + ) + self.COD.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="COD", + pretty_print=pretty_print, + ) + if 
self.Packages is not None: + namespaceprefix_ = ( + self.Packages_nsprefix_ + ":" + if (UseCapturedNS_ and self.Packages_nsprefix_) + else "" + ) + self.Packages.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Packages", + pretty_print=pretty_print, + ) + if self.Pickup is not None: + namespaceprefix_ = ( + self.Pickup_nsprefix_ + ":" + if (UseCapturedNS_ and self.Pickup_nsprefix_) + else "" + ) + self.Pickup.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Pickup", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("serviceId", node) + if value is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + self.serviceId = self.gds_parse_integer(value, node, "serviceId") + value = find_attr_value_("SortandSegregateCharge", node) + if value is not None and "SortandSegregateCharge" not in already_processed: + already_processed.add("SortandSegregateCharge") + self.SortandSegregateCharge = value + value = find_attr_value_("homelandSecurity", node) + if value is not None and "homelandSecurity" not in already_processed: + already_processed.add("homelandSecurity") + self.homelandSecurity = value + value = find_attr_value_("pierCharge", node) + if value is not None and "pierCharge" not in already_processed: + already_processed.add("pierCharge") + self.pierCharge = value + value = find_attr_value_("exhibitionConventionSite", node) + if value is not None and "exhibitionConventionSite" not in already_processed: + already_processed.add("exhibitionConventionSite") + self.exhibitionConventionSite = value + value = find_attr_value_("militaryBaseDelivery", node) + if value is not None and "militaryBaseDelivery" not in already_processed: + already_processed.add("militaryBaseDelivery") + self.militaryBaseDelivery = value + value = find_attr_value_("customsIn-bondFreight", node) + if value is not None and "customsIn-bondFreight" not in already_processed: + already_processed.add("customsIn-bondFreight") + self.customsIn_bondFreight = value + value = find_attr_value_("limitedAccess", node) + if value is not None and "limitedAccess" not in already_processed: + already_processed.add("limitedAccess") + self.limitedAccess = value + value = find_attr_value_("excessLength", node) + if value is not None and "excessLength" not in already_processed: + already_processed.add("excessLength") + self.excessLength = value + value = find_attr_value_("crossBorderFee", node) + if value is not None and "crossBorderFee" not in already_processed: + already_processed.add("crossBorderFee") + self.crossBorderFee = value + value = find_attr_value_("singleShipment", node) + if value is not None and "singleShipment" not in already_processed: + already_processed.add("singleShipment") + self.singleShipment = value + value = find_attr_value_("saturdayPickupRequired", node) + if value is not None and "saturdayPickupRequired" not in already_processed: + already_processed.add("saturdayPickupRequired") + self.saturdayPickupRequired = value + value = 
find_attr_value_("insidePickup", node) + if value is not None and "insidePickup" not in already_processed: + already_processed.add("insidePickup") + self.insidePickup = value + value = find_attr_value_("insuranceType", node) + if value is not None and "insuranceType" not in already_processed: + already_processed.add("insuranceType") + self.insuranceType = value + value = find_attr_value_("scheduledShipDate", node) + if value is not None and "scheduledShipDate" not in already_processed: + already_processed.add("scheduledShipDate") + self.scheduledShipDate = value + value = find_attr_value_("insideDelivery", node) + if value is not None and "insideDelivery" not in already_processed: + already_processed.add("insideDelivery") + self.insideDelivery = value + value = find_attr_value_("deliveryAppointment", node) + if value is not None and "deliveryAppointment" not in already_processed: + already_processed.add("deliveryAppointment") + self.deliveryAppointment = value + value = find_attr_value_("specialEquipment", node) + if value is not None and "specialEquipment" not in already_processed: + already_processed.add("specialEquipment") + self.specialEquipment = value + value = find_attr_value_("holdForPickupRequired", node) + if value is not None and "holdForPickupRequired" not in already_processed: + already_processed.add("holdForPickupRequired") + self.holdForPickupRequired = value + value = find_attr_value_("signatureRequired", node) + if value is not None and "signatureRequired" not in already_processed: + already_processed.add("signatureRequired") + self.signatureRequired = value + value = find_attr_value_("isSaturdayService", node) + if value is not None and "isSaturdayService" not in already_processed: + already_processed.add("isSaturdayService") + self.isSaturdayService = value + value = find_attr_value_("dangerousGoodsType", node) + if value is not None and "dangerousGoodsType" not in already_processed: + already_processed.add("dangerousGoodsType") + self.dangerousGoodsType = value + value = find_attr_value_("stackable", node) + if value is not None and "stackable" not in already_processed: + already_processed.add("stackable") + self.stackable = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "From": + obj_ = FromType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.From = obj_ + obj_.original_tagname_ = "From" + elif nodeName_ == "To": + obj_ = ToType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.To = obj_ + obj_.original_tagname_ = "To" + elif nodeName_ == "COD": + obj_ = CODType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.COD = obj_ + obj_.original_tagname_ = "COD" + elif nodeName_ == "Packages": + obj_ = PackagesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Packages = obj_ + obj_.original_tagname_ = "Packages" + elif nodeName_ == "Pickup": + obj_ = PickupType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Pickup = obj_ + obj_.original_tagname_ = "Pickup" + + +# end class QuoteRequestType + + +class FromType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + id=None, + company=None, + email=None, + attention=None, + phone=None, + tailgateRequired=None, + residential=None, + confirmDelivery=None, + instructions=None, + 
address1=None, + address2=None, + city=None, + state=None, + country=None, + zip=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.company = _cast(None, company) + self.company_nsprefix_ = None + self.email = _cast(None, email) + self.email_nsprefix_ = None + self.attention = _cast(None, attention) + self.attention_nsprefix_ = None + self.phone = _cast(None, phone) + self.phone_nsprefix_ = None + self.tailgateRequired = _cast(None, tailgateRequired) + self.tailgateRequired_nsprefix_ = None + self.residential = _cast(None, residential) + self.residential_nsprefix_ = None + self.confirmDelivery = _cast(None, confirmDelivery) + self.confirmDelivery_nsprefix_ = None + self.instructions = _cast(None, instructions) + self.instructions_nsprefix_ = None + self.address1 = _cast(None, address1) + self.address1_nsprefix_ = None + self.address2 = _cast(None, address2) + self.address2_nsprefix_ = None + self.city = _cast(None, city) + self.city_nsprefix_ = None + self.state = _cast(None, state) + self.state_nsprefix_ = None + self.country = _cast(None, country) + self.country_nsprefix_ = None + self.zip = _cast(None, zip) + self.zip_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, FromType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FromType.subclass: + return FromType.subclass(*args_, **kwargs_) + else: + return FromType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_company(self): + return self.company + + def set_company(self, company): + self.company = company + + def get_email(self): + return self.email + + def set_email(self, email): + self.email = email + + def get_attention(self): + return self.attention + + def set_attention(self, attention): + self.attention = attention + + def get_phone(self): + return self.phone + + def set_phone(self, phone): + self.phone = phone + + def get_tailgateRequired(self): + return self.tailgateRequired + + def set_tailgateRequired(self, tailgateRequired): + self.tailgateRequired = tailgateRequired + + def get_residential(self): + return self.residential + + def set_residential(self, residential): + self.residential = residential + + def get_confirmDelivery(self): + return self.confirmDelivery + + def set_confirmDelivery(self, confirmDelivery): + self.confirmDelivery = confirmDelivery + + def get_instructions(self): + return self.instructions + + def set_instructions(self, instructions): + self.instructions = instructions + + def get_address1(self): + return self.address1 + + def set_address1(self, address1): + self.address1 = address1 + + def get_address2(self): + return self.address2 + + def set_address2(self, address2): + self.address2 = address2 + + def get_city(self): + return self.city + + def set_city(self, city): + self.city = city + + def get_state(self): + return self.state + + def set_state(self, state): + self.state = state + + def get_country(self): + return self.country + + def set_country(self, country): + 
self.country = country + + def get_zip(self): + return self.zip + + def set_zip(self, zip): + self.zip = zip + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FromType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FromType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "FromType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="FromType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FromType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="FromType" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.company is not None and "company" not in already_processed: + already_processed.add("company") + outfile.write( + " company=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.company), input_name="company" + ) + ), + ) + ) + if self.email is not None and "email" not in already_processed: + already_processed.add("email") + outfile.write( + " email=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.email), input_name="email" + ) + ), + ) + ) + if self.attention is not None and "attention" not in already_processed: + already_processed.add("attention") + outfile.write( + " attention=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.attention), input_name="attention" + ) + ), + ) + ) + if self.phone is not None and "phone" not in already_processed: + already_processed.add("phone") + outfile.write( + " phone=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phone), input_name="phone" + ) + ), + ) + ) + if ( + self.tailgateRequired is not None + and "tailgateRequired" not in already_processed + ): + already_processed.add("tailgateRequired") + outfile.write( + " tailgateRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tailgateRequired), + input_name="tailgateRequired", + ) + ), + ) + ) + if self.residential is not None and "residential" not in already_processed: + already_processed.add("residential") + outfile.write( + " residential=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.residential), input_name="residential" + ) + ), + ) + ) + if ( + self.confirmDelivery is not None + and "confirmDelivery" not in already_processed + ): + 
already_processed.add("confirmDelivery") + outfile.write( + " confirmDelivery=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.confirmDelivery), + input_name="confirmDelivery", + ) + ), + ) + ) + if self.instructions is not None and "instructions" not in already_processed: + already_processed.add("instructions") + outfile.write( + " instructions=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.instructions), input_name="instructions" + ) + ), + ) + ) + if self.address1 is not None and "address1" not in already_processed: + already_processed.add("address1") + outfile.write( + " address1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address1), input_name="address1" + ) + ), + ) + ) + if self.address2 is not None and "address2" not in already_processed: + already_processed.add("address2") + outfile.write( + " address2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address2), input_name="address2" + ) + ), + ) + ) + if self.city is not None and "city" not in already_processed: + already_processed.add("city") + outfile.write( + " city=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.city), input_name="city" + ) + ), + ) + ) + if self.state is not None and "state" not in already_processed: + already_processed.add("state") + outfile.write( + " state=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.state), input_name="state" + ) + ), + ) + ) + if self.country is not None and "country" not in already_processed: + already_processed.add("country") + outfile.write( + " country=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.country), input_name="country" + ) + ), + ) + ) + if self.zip is not None and "zip" not in already_processed: + already_processed.add("zip") + outfile.write( + " zip=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.zip), input_name="zip") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FromType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + value = find_attr_value_("company", node) + if value is not None and "company" not in already_processed: + already_processed.add("company") + self.company = value + value = find_attr_value_("email", node) + if value is not None and "email" not in already_processed: + already_processed.add("email") + self.email = value + value = find_attr_value_("attention", node) + if value is not None and "attention" not in already_processed: + already_processed.add("attention") + self.attention = value + value = find_attr_value_("phone", node) + if value is not None and "phone" not in already_processed: + already_processed.add("phone") + self.phone = value + value = 
find_attr_value_("tailgateRequired", node) + if value is not None and "tailgateRequired" not in already_processed: + already_processed.add("tailgateRequired") + self.tailgateRequired = value + value = find_attr_value_("residential", node) + if value is not None and "residential" not in already_processed: + already_processed.add("residential") + self.residential = value + value = find_attr_value_("confirmDelivery", node) + if value is not None and "confirmDelivery" not in already_processed: + already_processed.add("confirmDelivery") + self.confirmDelivery = value + value = find_attr_value_("instructions", node) + if value is not None and "instructions" not in already_processed: + already_processed.add("instructions") + self.instructions = value + value = find_attr_value_("address1", node) + if value is not None and "address1" not in already_processed: + already_processed.add("address1") + self.address1 = value + value = find_attr_value_("address2", node) + if value is not None and "address2" not in already_processed: + already_processed.add("address2") + self.address2 = value + value = find_attr_value_("city", node) + if value is not None and "city" not in already_processed: + already_processed.add("city") + self.city = value + value = find_attr_value_("state", node) + if value is not None and "state" not in already_processed: + already_processed.add("state") + self.state = value + value = find_attr_value_("country", node) + if value is not None and "country" not in already_processed: + already_processed.add("country") + self.country = value + value = find_attr_value_("zip", node) + if value is not None and "zip" not in already_processed: + already_processed.add("zip") + self.zip = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class FromType + + +class ToType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + id=None, + company=None, + email=None, + notifyRecipient=None, + attention=None, + phone=None, + tailgateRequired=None, + residential=None, + instructions=None, + address1=None, + address2=None, + city=None, + state=None, + country=None, + zip=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.company = _cast(None, company) + self.company_nsprefix_ = None + self.email = _cast(None, email) + self.email_nsprefix_ = None + self.notifyRecipient = _cast(None, notifyRecipient) + self.notifyRecipient_nsprefix_ = None + self.attention = _cast(None, attention) + self.attention_nsprefix_ = None + self.phone = _cast(None, phone) + self.phone_nsprefix_ = None + self.tailgateRequired = _cast(None, tailgateRequired) + self.tailgateRequired_nsprefix_ = None + self.residential = _cast(None, residential) + self.residential_nsprefix_ = None + self.instructions = _cast(None, instructions) + self.instructions_nsprefix_ = None + self.address1 = _cast(None, address1) + self.address1_nsprefix_ = None + self.address2 = _cast(None, address2) + self.address2_nsprefix_ = None + self.city = _cast(None, city) + self.city_nsprefix_ = None + self.state = _cast(None, state) + self.state_nsprefix_ = None + self.country = _cast(None, country) + self.country_nsprefix_ = None + self.zip = _cast(None, zip) + 
self.zip_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ToType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ToType.subclass: + return ToType.subclass(*args_, **kwargs_) + else: + return ToType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_company(self): + return self.company + + def set_company(self, company): + self.company = company + + def get_email(self): + return self.email + + def set_email(self, email): + self.email = email + + def get_notifyRecipient(self): + return self.notifyRecipient + + def set_notifyRecipient(self, notifyRecipient): + self.notifyRecipient = notifyRecipient + + def get_attention(self): + return self.attention + + def set_attention(self, attention): + self.attention = attention + + def get_phone(self): + return self.phone + + def set_phone(self, phone): + self.phone = phone + + def get_tailgateRequired(self): + return self.tailgateRequired + + def set_tailgateRequired(self, tailgateRequired): + self.tailgateRequired = tailgateRequired + + def get_residential(self): + return self.residential + + def set_residential(self, residential): + self.residential = residential + + def get_instructions(self): + return self.instructions + + def set_instructions(self, instructions): + self.instructions = instructions + + def get_address1(self): + return self.address1 + + def set_address1(self, address1): + self.address1 = address1 + + def get_address2(self): + return self.address2 + + def set_address2(self, address2): + self.address2 = address2 + + def get_city(self): + return self.city + + def set_city(self, city): + self.city = city + + def get_state(self): + return self.state + + def set_state(self, state): + self.state = state + + def get_country(self): + return self.country + + def set_country(self, country): + self.country = country + + def get_zip(self): + return self.zip + + def set_zip(self, zip): + self.zip = zip + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ToType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ToType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ToType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ToType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ToType", + pretty_print=pretty_print, + ) + outfile.write("%s" % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="ToType" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.company is not None and "company" not in already_processed: + already_processed.add("company") + outfile.write( + " company=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.company), input_name="company" + ) + ), + ) + ) + if self.email is not None and "email" not in already_processed: + already_processed.add("email") + outfile.write( + " email=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.email), input_name="email" + ) + ), + ) + ) + if ( + self.notifyRecipient is not None + and "notifyRecipient" not in already_processed + ): + already_processed.add("notifyRecipient") + outfile.write( + " notifyRecipient=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.notifyRecipient), + input_name="notifyRecipient", + ) + ), + ) + ) + if self.attention is not None and "attention" not in already_processed: + already_processed.add("attention") + outfile.write( + " attention=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.attention), input_name="attention" + ) + ), + ) + ) + if self.phone is not None and "phone" not in already_processed: + already_processed.add("phone") + outfile.write( + " phone=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phone), input_name="phone" + ) + ), + ) + ) + if ( + self.tailgateRequired is not None + and "tailgateRequired" not in already_processed + ): + already_processed.add("tailgateRequired") + outfile.write( + " tailgateRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tailgateRequired), + input_name="tailgateRequired", + ) + ), + ) + ) + if self.residential is not None and "residential" not in already_processed: + already_processed.add("residential") + outfile.write( + " residential=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.residential), input_name="residential" + ) + ), + ) + ) + if self.instructions is not None and "instructions" not in already_processed: + already_processed.add("instructions") + outfile.write( + " instructions=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.instructions), input_name="instructions" + ) + ), + ) + ) + if self.address1 is not None and "address1" not in already_processed: + already_processed.add("address1") + outfile.write( + " address1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address1), input_name="address1" + ) + ), + ) + ) + if self.address2 is not None and "address2" not in already_processed: + already_processed.add("address2") + outfile.write( + " address2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address2), input_name="address2" + ) + ), + ) + ) + if self.city is not None and "city" not in already_processed: + already_processed.add("city") + outfile.write( + " city=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.city), input_name="city" + ) + ), + ) + ) + if self.state is not None and "state" not in already_processed: + already_processed.add("state") + outfile.write( + " state=%s" + % ( + self.gds_encode( + 
self.gds_format_string( + quote_attrib(self.state), input_name="state" + ) + ), + ) + ) + if self.country is not None and "country" not in already_processed: + already_processed.add("country") + outfile.write( + " country=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.country), input_name="country" + ) + ), + ) + ) + if self.zip is not None and "zip" not in already_processed: + already_processed.add("zip") + outfile.write( + " zip=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.zip), input_name="zip") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ToType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + value = find_attr_value_("company", node) + if value is not None and "company" not in already_processed: + already_processed.add("company") + self.company = value + value = find_attr_value_("email", node) + if value is not None and "email" not in already_processed: + already_processed.add("email") + self.email = value + value = find_attr_value_("notifyRecipient", node) + if value is not None and "notifyRecipient" not in already_processed: + already_processed.add("notifyRecipient") + self.notifyRecipient = value + value = find_attr_value_("attention", node) + if value is not None and "attention" not in already_processed: + already_processed.add("attention") + self.attention = value + value = find_attr_value_("phone", node) + if value is not None and "phone" not in already_processed: + already_processed.add("phone") + self.phone = value + value = find_attr_value_("tailgateRequired", node) + if value is not None and "tailgateRequired" not in already_processed: + already_processed.add("tailgateRequired") + self.tailgateRequired = value + value = find_attr_value_("residential", node) + if value is not None and "residential" not in already_processed: + already_processed.add("residential") + self.residential = value + value = find_attr_value_("instructions", node) + if value is not None and "instructions" not in already_processed: + already_processed.add("instructions") + self.instructions = value + value = find_attr_value_("address1", node) + if value is not None and "address1" not in already_processed: + already_processed.add("address1") + self.address1 = value + value = find_attr_value_("address2", node) + if value is not None and "address2" not in already_processed: + already_processed.add("address2") + self.address2 = value + value = find_attr_value_("city", node) + if value is not None and "city" not in already_processed: + already_processed.add("city") + self.city = value + value = find_attr_value_("state", node) + if value is not None and "state" not in already_processed: + already_processed.add("state") + self.state = value + value = find_attr_value_("country", node) + if value is not 
None and "country" not in already_processed: + already_processed.add("country") + self.country = value + value = find_attr_value_("zip", node) + if value is not None and "zip" not in already_processed: + already_processed.add("zip") + self.zip = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ToType + + +class CODType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, paymentType=None, CODReturnAddress=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.paymentType = _cast(None, paymentType) + self.paymentType_nsprefix_ = None + self.CODReturnAddress = CODReturnAddress + self.CODReturnAddress_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, CODType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CODType.subclass: + return CODType.subclass(*args_, **kwargs_) + else: + return CODType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_CODReturnAddress(self): + return self.CODReturnAddress + + def set_CODReturnAddress(self, CODReturnAddress): + self.CODReturnAddress = CODReturnAddress + + def get_paymentType(self): + return self.paymentType + + def set_paymentType(self, paymentType): + self.paymentType = paymentType + + def _hasContent(self): + if self.CODReturnAddress is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CODType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CODType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CODType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CODType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="CODType" + ): + if self.paymentType is not None and "paymentType" not in already_processed: + already_processed.add("paymentType") + outfile.write( + " paymentType=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.paymentType), input_name="paymentType" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" 
+ else: + eol_ = "" + if self.CODReturnAddress is not None: + namespaceprefix_ = ( + self.CODReturnAddress_nsprefix_ + ":" + if (UseCapturedNS_ and self.CODReturnAddress_nsprefix_) + else "" + ) + self.CODReturnAddress.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="CODReturnAddress", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("paymentType", node) + if value is not None and "paymentType" not in already_processed: + already_processed.add("paymentType") + self.paymentType = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "CODReturnAddress": + obj_ = CODReturnAddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.CODReturnAddress = obj_ + obj_.original_tagname_ = "CODReturnAddress" + + +# end class CODType + + +class CODReturnAddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + codCompany=None, + codName=None, + codAddress1=None, + codCity=None, + codStateCode=None, + codZip=None, + codCountry=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.codCompany = _cast(None, codCompany) + self.codCompany_nsprefix_ = None + self.codName = _cast(None, codName) + self.codName_nsprefix_ = None + self.codAddress1 = _cast(None, codAddress1) + self.codAddress1_nsprefix_ = None + self.codCity = _cast(None, codCity) + self.codCity_nsprefix_ = None + self.codStateCode = _cast(None, codStateCode) + self.codStateCode_nsprefix_ = None + self.codZip = _cast(None, codZip) + self.codZip_nsprefix_ = None + self.codCountry = _cast(None, codCountry) + self.codCountry_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CODReturnAddressType + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CODReturnAddressType.subclass: + return CODReturnAddressType.subclass(*args_, **kwargs_) + else: + return CODReturnAddressType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_codCompany(self): + return self.codCompany + + def set_codCompany(self, codCompany): + self.codCompany = codCompany + + def get_codName(self): + return self.codName + + def set_codName(self, codName): + self.codName = codName + + def get_codAddress1(self): + return self.codAddress1 + + def set_codAddress1(self, codAddress1): + self.codAddress1 = codAddress1 + + def get_codCity(self): + return self.codCity + + def set_codCity(self, codCity): + self.codCity = codCity + + def get_codStateCode(self): + return self.codStateCode + + 
def set_codStateCode(self, codStateCode): + self.codStateCode = codStateCode + + def get_codZip(self): + return self.codZip + + def set_codZip(self, codZip): + self.codZip = codZip + + def get_codCountry(self): + return self.codCountry + + def set_codCountry(self, codCountry): + self.codCountry = codCountry + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODReturnAddressType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CODReturnAddressType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CODReturnAddressType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="CODReturnAddressType", + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CODReturnAddressType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CODReturnAddressType", + ): + if self.codCompany is not None and "codCompany" not in already_processed: + already_processed.add("codCompany") + outfile.write( + " codCompany=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codCompany), input_name="codCompany" + ) + ), + ) + ) + if self.codName is not None and "codName" not in already_processed: + already_processed.add("codName") + outfile.write( + " codName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codName), input_name="codName" + ) + ), + ) + ) + if self.codAddress1 is not None and "codAddress1" not in already_processed: + already_processed.add("codAddress1") + outfile.write( + " codAddress1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codAddress1), input_name="codAddress1" + ) + ), + ) + ) + if self.codCity is not None and "codCity" not in already_processed: + already_processed.add("codCity") + outfile.write( + " codCity=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codCity), input_name="codCity" + ) + ), + ) + ) + if self.codStateCode is not None and "codStateCode" not in already_processed: + already_processed.add("codStateCode") + outfile.write( + " codStateCode=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codStateCode), input_name="codStateCode" + ) + ), + ) + ) + if self.codZip is not None and "codZip" not in already_processed: + already_processed.add("codZip") + outfile.write( + " codZip=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codZip), input_name="codZip" + ) + ), + ) + ) + if self.codCountry is not None and "codCountry" not in 
already_processed: + already_processed.add("codCountry") + outfile.write( + " codCountry=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codCountry), input_name="codCountry" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODReturnAddressType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("codCompany", node) + if value is not None and "codCompany" not in already_processed: + already_processed.add("codCompany") + self.codCompany = value + value = find_attr_value_("codName", node) + if value is not None and "codName" not in already_processed: + already_processed.add("codName") + self.codName = value + value = find_attr_value_("codAddress1", node) + if value is not None and "codAddress1" not in already_processed: + already_processed.add("codAddress1") + self.codAddress1 = value + value = find_attr_value_("codCity", node) + if value is not None and "codCity" not in already_processed: + already_processed.add("codCity") + self.codCity = value + value = find_attr_value_("codStateCode", node) + if value is not None and "codStateCode" not in already_processed: + already_processed.add("codStateCode") + self.codStateCode = value + value = find_attr_value_("codZip", node) + if value is not None and "codZip" not in already_processed: + already_processed.add("codZip") + self.codZip = value + value = find_attr_value_("codCountry", node) + if value is not None and "codCountry" not in already_processed: + already_processed.add("codCountry") + self.codCountry = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class CODReturnAddressType + + +class PackagesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, type_=None, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.type_ = _cast(None, type_) + self.type__nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PackagesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackagesType.subclass: + return PackagesType.subclass(*args_, **kwargs_) + else: + return PackagesType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Package(self): + return self.Package + + def set_Package(self, Package): + self.Package = Package + + def add_Package(self, value): + self.Package.append(value) + + def 
insert_Package_at(self, index, value): + self.Package.insert(index, value) + + def replace_Package_at(self, index, value): + self.Package[index] = value + + def get_type(self): + return self.type_ + + def set_type(self, type_): + self.type_ = type_ + + def _hasContent(self): + if self.Package: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackagesType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PackagesType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PackagesType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PackagesType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PackagesType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PackagesType", + ): + if self.type_ is not None and "type_" not in already_processed: + already_processed.add("type_") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type_), input_name="type" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackagesType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + for Package_ in self.Package: + namespaceprefix_ = ( + self.Package_nsprefix_ + ":" + if (UseCapturedNS_ and self.Package_nsprefix_) + else "" + ) + Package_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Package", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") + self.type_ = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Package": + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = "Package" + + +# end class PackagesType + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + length=None, + width=None, + height=None, + weight=None, + 
type_=None, + freightClass=None, + nmfcCode=None, + insuranceAmount=None, + codAmount=None, + description=None, + weightOz=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.length = _cast(int, length) + self.length_nsprefix_ = None + self.width = _cast(int, width) + self.width_nsprefix_ = None + self.height = _cast(int, height) + self.height_nsprefix_ = None + self.weight = _cast(int, weight) + self.weight_nsprefix_ = None + self.type_ = _cast(None, type_) + self.type__nsprefix_ = None + self.freightClass = _cast(int, freightClass) + self.freightClass_nsprefix_ = None + self.nmfcCode = _cast(int, nmfcCode) + self.nmfcCode_nsprefix_ = None + self.insuranceAmount = _cast(float, insuranceAmount) + self.insuranceAmount_nsprefix_ = None + self.codAmount = _cast(float, codAmount) + self.codAmount_nsprefix_ = None + self.description = _cast(None, description) + self.description_nsprefix_ = None + self.weightOz = _cast(float, weightOz) + self.weightOz_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_length(self): + return self.length + + def set_length(self, length): + self.length = length + + def get_width(self): + return self.width + + def set_width(self, width): + self.width = width + + def get_height(self): + return self.height + + def set_height(self, height): + self.height = height + + def get_weight(self): + return self.weight + + def set_weight(self, weight): + self.weight = weight + + def get_type(self): + return self.type_ + + def set_type(self, type_): + self.type_ = type_ + + def get_freightClass(self): + return self.freightClass + + def set_freightClass(self, freightClass): + self.freightClass = freightClass + + def get_nmfcCode(self): + return self.nmfcCode + + def set_nmfcCode(self, nmfcCode): + self.nmfcCode = nmfcCode + + def get_insuranceAmount(self): + return self.insuranceAmount + + def set_insuranceAmount(self, insuranceAmount): + self.insuranceAmount = insuranceAmount + + def get_codAmount(self): + return self.codAmount + + def set_codAmount(self, codAmount): + self.codAmount = codAmount + + def get_description(self): + return self.description + + def set_description(self, description): + self.description = description + + def get_weightOz(self): + return self.weightOz + + def set_weightOz(self, weightOz): + self.weightOz = weightOz + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackageType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PackageType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + 
else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PackageType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PackageType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PackageType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PackageType", + ): + if self.length is not None and "length" not in already_processed: + already_processed.add("length") + outfile.write( + ' length="%s"' + % self.gds_format_integer(self.length, input_name="length") + ) + if self.width is not None and "width" not in already_processed: + already_processed.add("width") + outfile.write( + ' width="%s"' % self.gds_format_integer(self.width, input_name="width") + ) + if self.height is not None and "height" not in already_processed: + already_processed.add("height") + outfile.write( + ' height="%s"' + % self.gds_format_integer(self.height, input_name="height") + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' + % self.gds_format_integer(self.weight, input_name="weight") + ) + if self.type_ is not None and "type_" not in already_processed: + already_processed.add("type_") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type_), input_name="type" + ) + ), + ) + ) + if self.freightClass is not None and "freightClass" not in already_processed: + already_processed.add("freightClass") + outfile.write( + ' freightClass="%s"' + % self.gds_format_integer(self.freightClass, input_name="freightClass") + ) + if self.nmfcCode is not None and "nmfcCode" not in already_processed: + already_processed.add("nmfcCode") + outfile.write( + ' nmfcCode="%s"' + % self.gds_format_integer(self.nmfcCode, input_name="nmfcCode") + ) + if ( + self.insuranceAmount is not None + and "insuranceAmount" not in already_processed + ): + already_processed.add("insuranceAmount") + outfile.write( + ' insuranceAmount="%s"' + % self.gds_format_float( + self.insuranceAmount, input_name="insuranceAmount" + ) + ) + if self.codAmount is not None and "codAmount" not in already_processed: + already_processed.add("codAmount") + outfile.write( + ' codAmount="%s"' + % self.gds_format_float(self.codAmount, input_name="codAmount") + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + if self.weightOz is not None and "weightOz" not in already_processed: + already_processed.add("weightOz") + outfile.write( + ' weightOz="%s"' + % self.gds_format_float(self.weightOz, input_name="weightOz") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + 
name_="PackageType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("length", node) + if value is not None and "length" not in already_processed: + already_processed.add("length") + self.length = self.gds_parse_integer(value, node, "length") + value = find_attr_value_("width", node) + if value is not None and "width" not in already_processed: + already_processed.add("width") + self.width = self.gds_parse_integer(value, node, "width") + value = find_attr_value_("height", node) + if value is not None and "height" not in already_processed: + already_processed.add("height") + self.height = self.gds_parse_integer(value, node, "height") + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") + self.weight = self.gds_parse_integer(value, node, "weight") + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") + self.type_ = value + value = find_attr_value_("freightClass", node) + if value is not None and "freightClass" not in already_processed: + already_processed.add("freightClass") + self.freightClass = self.gds_parse_integer(value, node, "freightClass") + value = find_attr_value_("nmfcCode", node) + if value is not None and "nmfcCode" not in already_processed: + already_processed.add("nmfcCode") + self.nmfcCode = self.gds_parse_integer(value, node, "nmfcCode") + value = find_attr_value_("insuranceAmount", node) + if value is not None and "insuranceAmount" not in already_processed: + already_processed.add("insuranceAmount") + value = self.gds_parse_float(value, node, "insuranceAmount") + self.insuranceAmount = value + value = find_attr_value_("codAmount", node) + if value is not None and "codAmount" not in already_processed: + already_processed.add("codAmount") + value = self.gds_parse_float(value, node, "codAmount") + self.codAmount = value + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") + self.description = value + value = find_attr_value_("weightOz", node) + if value is not None and "weightOz" not in already_processed: + already_processed.add("weightOz") + value = self.gds_parse_float(value, node, "weightOz") + self.weightOz = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class PackageType + + +class PickupType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + contactName=None, + phoneNumber=None, + pickupDate=None, + pickupTime=None, + closingTime=None, + location=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + 
self.contactName = _cast(None, contactName) + self.contactName_nsprefix_ = None + self.phoneNumber = _cast(None, phoneNumber) + self.phoneNumber_nsprefix_ = None + if isinstance(pickupDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(pickupDate, "%Y-%m-%d").date() + else: + initvalue_ = pickupDate + self.pickupDate = initvalue_ + self.pickupTime = _cast(None, pickupTime) + self.pickupTime_nsprefix_ = None + self.closingTime = _cast(None, closingTime) + self.closingTime_nsprefix_ = None + self.location = _cast(None, location) + self.location_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PickupType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PickupType.subclass: + return PickupType.subclass(*args_, **kwargs_) + else: + return PickupType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_contactName(self): + return self.contactName + + def set_contactName(self, contactName): + self.contactName = contactName + + def get_phoneNumber(self): + return self.phoneNumber + + def set_phoneNumber(self, phoneNumber): + self.phoneNumber = phoneNumber + + def get_pickupDate(self): + return self.pickupDate + + def set_pickupDate(self, pickupDate): + self.pickupDate = pickupDate + + def get_pickupTime(self): + return self.pickupTime + + def set_pickupTime(self, pickupTime): + self.pickupTime = pickupTime + + def get_closingTime(self): + return self.closingTime + + def set_closingTime(self, closingTime): + self.closingTime = closingTime + + def get_location(self): + return self.location + + def set_location(self, location): + self.location = location + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PickupType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PickupType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PickupType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PickupType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PickupType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="PickupType" + ): + if self.contactName is not None and "contactName" not in already_processed: + already_processed.add("contactName") + outfile.write( + " contactName=%s" + % ( + self.gds_encode( + 
self.gds_format_string( + quote_attrib(self.contactName), input_name="contactName" + ) + ), + ) + ) + if self.phoneNumber is not None and "phoneNumber" not in already_processed: + already_processed.add("phoneNumber") + outfile.write( + " phoneNumber=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phoneNumber), input_name="phoneNumber" + ) + ), + ) + ) + if self.pickupDate is not None and "pickupDate" not in already_processed: + already_processed.add("pickupDate") + outfile.write( + ' pickupDate="%s"' + % self.gds_format_date(self.pickupDate, input_name="pickupDate") + ) + if self.pickupTime is not None and "pickupTime" not in already_processed: + already_processed.add("pickupTime") + outfile.write( + " pickupTime=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pickupTime), input_name="pickupTime" + ) + ), + ) + ) + if self.closingTime is not None and "closingTime" not in already_processed: + already_processed.add("closingTime") + outfile.write( + " closingTime=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.closingTime), input_name="closingTime" + ) + ), + ) + ) + if self.location is not None and "location" not in already_processed: + already_processed.add("location") + outfile.write( + " location=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.location), input_name="location" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PickupType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("contactName", node) + if value is not None and "contactName" not in already_processed: + already_processed.add("contactName") + self.contactName = value + value = find_attr_value_("phoneNumber", node) + if value is not None and "phoneNumber" not in already_processed: + already_processed.add("phoneNumber") + self.phoneNumber = value + value = find_attr_value_("pickupDate", node) + if value is not None and "pickupDate" not in already_processed: + already_processed.add("pickupDate") + try: + self.pickupDate = self.gds_parse_date(value) + except ValueError as exp: + raise ValueError("Bad date attribute (pickupDate): %s" % exp) + value = find_attr_value_("pickupTime", node) + if value is not None and "pickupTime" not in already_processed: + already_processed.add("pickupTime") + self.pickupTime = value + value = find_attr_value_("closingTime", node) + if value is not None and "closingTime" not in already_processed: + already_processed.add("closingTime") + self.closingTime = value + value = find_attr_value_("location", node) + if value is not None and "location" not in already_processed: + already_processed.add("location") + self.location = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class PickupType + + +GDSClassesMapping = {} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def 
usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
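+
+    Example (illustrative only; assumes "xml_string" holds a quote
+    request XML document without an encoding declaration):
+
+        rootObj = parseString(xml_string, silence=True)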
+    """
+    parser = None
+    rootNode = parsexmlstring_(inString, parser)
+    gds_collector = GdsCollector_()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = "EShipper"
+        rootClass = EShipper
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    if not SaveElementTreeNode:
+        rootNode = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="")
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ("-" * 50) + "\n"
+        sys.stderr.write(separator)
+        sys.stderr.write(
+            "----- Warnings -- count: {} -----\n".format(
+                len(gds_collector.get_messages()),
+            )
+        )
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def parseLiteral(inFileName, silence=False, print_warnings=True):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    gds_collector = GdsCollector_()
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = "EShipper"
+        rootClass = EShipper
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    # Enable Python to collect the space used by the DOM.
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        sys.stdout.write("#from quote_request import *\n\n")
+        sys.stdout.write("import quote_request as model_\n\n")
+        sys.stdout.write("rootObj = model_.rootClass(\n")
+        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
+        sys.stdout.write(")\n")
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ("-" * 50) + "\n"
+        sys.stderr.write(separator)
+        sys.stderr.write(
+            "----- Warnings -- count: {} -----\n".format(
+                len(gds_collector.get_messages()),
+            )
+        )
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def main():
+    args = sys.argv[1:]
+    if len(args) == 1:
+        parse(args[0])
+    else:
+        usage()
+
+
+if __name__ == "__main__":
+    # import pdb; pdb.set_trace()
+    main()
+
+RenameMappings_ = {}
+
+#
+# Mapping of namespaces to types defined in them
+# and the file in which each is defined.
+# simpleTypes are marked "ST" and complexTypes "CT".
+NamespaceToDefMappings_ = {"http://www.eshipper.net/XMLSchema": []}
+
+__all__ = [
+    "CODReturnAddressType",
+    "CODType",
+    "EShipper",
+    "FromType",
+    "PackageType",
+    "PackagesType",
+    "PickupType",
+    "QuoteRequestType",
+    "ToType",
+]
diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_reply.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_reply.py
new file mode 100644
index 0000000000..62645ad894
--- /dev/null
+++ b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_reply.py
@@ -0,0 +1,2063 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+#
+# Generated Mon Oct 24 11:17:22 2022 by generateDS.py version 2.41.1.
+# Python 3.10.6 (main, Aug 30 2022, 05:12:36) [Clang 13.1.6 (clang-1316.0.21.2.5)] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio.schemas.eshipper_xml/shipment_cancel_reply.py') +# +# Command line arguments: +# ./vendor/schemas/shipment_cancel_reply.xsd +# +# Command line: +# /Users/danielkobina/Workspace/project/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio.schemas.eshipper_xml/shipment_cancel_reply.py" ./vendor/schemas/shipment_cancel_reply.xsd +# +# Current working directory (os.getcwd()): +# eshipper_xml +# + +import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return None + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": self.__class__.__name__, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): + return input_data + + def gds_validate_string(self, input_data, node=None, input_name=""): + if not input_data: + return "" + else: + return input_data + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def 
gds_validate_base64(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of integer values") + return values + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of float values") + return values + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." 
in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: %s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, "Requires sequence of double or float values" + ) + return values + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + input_data = input_data.strip() + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) + return input_data + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) + return values + + def gds_validate_datetime(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_datetime(self, input_data, input_name=""): + if 
input_data.microsecond == 0: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split(".") + if len(time_parts) > 1: + micro_seconds = int(float("0." + time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt + + def gds_validate_date(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + except AttributeError: + pass + return _svalue + + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") + dt = dt.replace(tzinfo=tz) + return dt.date() + + def gds_validate_time(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_time(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%02d:%02d:%02d" % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%02d:%02d:%02d.%s" % ( + input_data.hour, + input_data.minute, + input_data.second, + 
("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_str_lower(self, instring): + return instring.lower() + + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = "/".join(path_list) + return path + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + + def get_path_list_(self, node, path_list): + if node is None: + return + tag = 
GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) + if classname is not None: + names = classname.split(":") + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = "utf-8" + return instring.encode(encoding) + else: + return instring + + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode("utf8") + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + + if type(self) != type(other): + return False + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + + def __ne__(self, other): + return not self.__eq__(other) + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + + def getSubclassFromModule_(module, class_): + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
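# A minimal sketch of the subclass-redirect hook above (the module name and
# subclass are hypothetical): if an importable module my_subclasses defines
#
#     class EShipperSub(EShipper): ...
#
# then setting
#
#     CurrentSubclassModule_ = my_subclasses
#
# makes EShipper.factory() return EShipperSub instances, because
# getSubclassFromModule_ looks up "<class name>Sub" on that module.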
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(" ") + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return "" + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s2 = "" + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos : mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start() : mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + return s1 + + +def quote_attrib(inStr): + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s1 = s1.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + s1 = s1.replace("\n", " ") + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find("\n") == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find("\n") == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = "" + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(":") + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == "xml": + namespace = "http://www.w3.org/XML/1998/namespace" + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get( + "{%s}%s" + % ( + namespace, + name, + ) + ) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = "%s (element %s/line %d)" % ( + msg, + node.tag, + node.sourceline, + ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + + def getCategory(self): + return self.category + + def getContenttype(self, content_type): + return self.content_type + + def getValue(self): + return self.value + + def getName(self): + return self.name + + def export(self, outfile, level, name, namespace, pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write("<%s>%g" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement(element, "%s" % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = "%g" % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = "%s" % base64.b64encode(self.value) + return text + + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(")\n") + + +class MemberSpec_(object): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + + def set_name(self, name): + self.name = name + + def 
get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return "xs:string" + else: + return self.data_type + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Data representation classes. +# + + +class EShipper(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, version=None, ShipmentCancelReply=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.version = _cast(None, version) + self.version_nsprefix_ = None + self.ShipmentCancelReply = ShipmentCancelReply + self.ShipmentCancelReply_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, EShipper) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EShipper.subclass: + return EShipper.subclass(*args_, **kwargs_) + else: + return EShipper(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_ShipmentCancelReply(self): + return self.ShipmentCancelReply + + def set_ShipmentCancelReply(self, ShipmentCancelReply): + self.ShipmentCancelReply = ShipmentCancelReply + + def get_version(self): + return self.version + + def set_version(self, version): + self.version = version + + def _hasContent(self): + if self.ShipmentCancelReply is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EShipper") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EShipper": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="EShipper" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EShipper", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def 
_exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="EShipper" + ): + if self.version is not None and "version" not in already_processed: + already_processed.add("version") + outfile.write( + " version=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.version), input_name="version" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.ShipmentCancelReply is not None: + namespaceprefix_ = ( + self.ShipmentCancelReply_nsprefix_ + ":" + if (UseCapturedNS_ and self.ShipmentCancelReply_nsprefix_) + else "" + ) + self.ShipmentCancelReply.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ShipmentCancelReply", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("version", node) + if value is not None and "version" not in already_processed: + already_processed.add("version") + self.version = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "ShipmentCancelReply": + obj_ = ShipmentCancelReplyType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShipmentCancelReply = obj_ + obj_.original_tagname_ = "ShipmentCancelReply" + + +# end class EShipper + + +class ShipmentCancelReplyType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, Order=None, Status=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.Order = Order + self.Order_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShipmentCancelReplyType + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShipmentCancelReplyType.subclass: + return ShipmentCancelReplyType.subclass(*args_, **kwargs_) + else: + return ShipmentCancelReplyType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Order(self): + return self.Order + + def set_Order(self, Order): + self.Order = Order + + def get_Status(self): + return self.Status + + def set_Status(self, Status): + self.Status = Status + + def _hasContent(self): + if self.Order is not None or self.Status is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShipmentCancelReplyType", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("ShipmentCancelReplyType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ShipmentCancelReplyType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ShipmentCancelReplyType", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ShipmentCancelReplyType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ShipmentCancelReplyType", + ): + pass + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShipmentCancelReplyType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.Order is not None: + namespaceprefix_ = ( + self.Order_nsprefix_ + ":" + if (UseCapturedNS_ and self.Order_nsprefix_) + else "" + ) + self.Order.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Order", + pretty_print=pretty_print, + ) + if self.Status is not None: + namespaceprefix_ = ( + self.Status_nsprefix_ + ":" + if (UseCapturedNS_ and self.Status_nsprefix_) + else "" + ) + self.Status.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Status", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + pass + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Order": + obj_ = OrderType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Order = obj_ + obj_.original_tagname_ = "Order" + elif nodeName_ == "Status": + obj_ = StatusType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Status = obj_ + obj_.original_tagname_ = "Status" + + +# end class ShipmentCancelReplyType + + +class OrderType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, orderId=None, message=None, valueOf_=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.orderId = _cast(int, orderId) + self.orderId_nsprefix_ = None + self.message = _cast(None, message) + self.message_nsprefix_ = None + 
self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, OrderType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if OrderType.subclass: + return OrderType.subclass(*args_, **kwargs_) + else: + return OrderType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_orderId(self): + return self.orderId + + def set_orderId(self, orderId): + self.orderId = orderId + + def get_message(self): + return self.message + + def set_message(self, message): + self.message = message + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OrderType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("OrderType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "OrderType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="OrderType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="OrderType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="OrderType" + ): + if self.orderId is not None and "orderId" not in already_processed: + already_processed.add("orderId") + outfile.write( + ' orderId="%s"' + % self.gds_format_integer(self.orderId, input_name="orderId") + ) + if self.message is not None and "message" not in already_processed: + already_processed.add("message") + outfile.write( + " message=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.message), input_name="message" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OrderType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("orderId", node) + if value is not None and "orderId" not in already_processed: + already_processed.add("orderId") + 
self.orderId = self.gds_parse_integer(value, node, "orderId") + value = find_attr_value_("message", node) + if value is not None and "message" not in already_processed: + already_processed.add("message") + self.message = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class OrderType + + +class StatusType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, statusId=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.statusId = _cast(int, statusId) + self.statusId_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, StatusType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if StatusType.subclass: + return StatusType.subclass(*args_, **kwargs_) + else: + return StatusType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_statusId(self): + return self.statusId + + def set_statusId(self, statusId): + self.statusId = statusId + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="StatusType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("StatusType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "StatusType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="StatusType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="StatusType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="StatusType" + ): + if self.statusId is not None and "statusId" not in already_processed: + already_processed.add("statusId") + outfile.write( + ' statusId="%s"' + % self.gds_format_integer(self.statusId, input_name="statusId") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="StatusType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = 
node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("statusId", node) + if value is not None and "statusId" not in already_processed: + already_processed.add("statusId") + self.statusId = self.gds_parse_integer(value, node, "statusId") + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class StatusType + + +GDSClassesMapping = {} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + """ + parser = None + rootNode = parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write("#from shipment_cancel_reply import *\n\n") + sys.stdout.write("import shipment_cancel_reply as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == "__main__": + # import pdb; pdb.set_trace() + main() + +RenameMappings_ = {} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
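As a usage sketch for the classes above (the XML values are illustrative, and the import path assumes the karrio.schemas.eshipper_xml package laid out by this patch), a cancellation reply can be parsed and inspected like this:

    from karrio.schemas.eshipper_xml import shipment_cancel_reply as reply_mod

    SAMPLE_REPLY = (
        '<EShipper xmlns="http://www.eshipper.net/xml/XMLSchema" version="3.0.0">'
        "<ShipmentCancelReply>"
        '<Order orderId="181004" message="Order cancelled"/>'
        '<Status statusId="4"/>'
        "</ShipmentCancelReply>"
        "</EShipper>"
    )

    # parseString builds the object tree; silence=True skips the re-export
    # that it would otherwise write to stdout.
    root = reply_mod.parseString(SAMPLE_REPLY, silence=True)
    cancel = root.get_ShipmentCancelReply()
    print(cancel.get_Order().get_orderId())    # 181004
    print(cancel.get_Order().get_message())    # Order cancelled
    print(cancel.get_Status().get_statusId())  # 4

Any schema warnings collected during the build are written to stderr unless print_warnings=False is passed.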
+NamespaceToDefMappings_ = {"http://www.eshipper.net/xml/XMLSchema": []} + +__all__ = ["EShipper", "OrderType", "ShipmentCancelReplyType", "StatusType"] diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_request.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_request.py new file mode 100644 index 0000000000..bd6c9c5464 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipment_cancel_request.py @@ -0,0 +1,1915 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Mon Oct 24 11:17:21 2022 by generateDS.py version 2.41.1. +# Python 3.10.6 (main, Aug 30 2022, 05:12:36) [Clang 13.1.6 (clang-1316.0.21.2.5)] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio.schemas.eshipper_xml/shipment_cancel_request.py') +# +# Command line arguments: +# ./vendor/schemas/shipment_cancel_request.xsd +# +# Command line: +# /Users/danielkobina/Workspace/project/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio.schemas.eshipper_xml/shipment_cancel_request.py" ./vendor/schemas/shipment_cancel_request.xsd +# +# Current working directory (os.getcwd()): +# eshipper_xml +# + +import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
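Both replacement hooks described above follow the same pattern: an importable module with the expected name takes precedence over the fallback defined here. As a hypothetical sketch (not part of this generated file), a generatedscollector.py that fails fast on the first schema warning, instead of accumulating messages like the fallback GdsCollector_ above, could look like:

    # generatedscollector.py -- hypothetical drop-in replacement
    class GdsCollector(object):
        def __init__(self, messages=None):
            self.messages = list(messages) if messages is not None else []

        def add_message(self, msg):
            # abort on the first validation warning instead of collecting it
            raise ValueError("schema warning: {}".format(msg))

        def get_messages(self):
            return self.messages

        def clear_messages(self):
            self.messages = []

        def print_messages(self):
            for msg in self.messages:
                print("Warning: {}".format(msg))

        def write_messages(self, outstream):
            for msg in self.messages:
                outstream.write("Warning: {}\n".format(msg))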
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return None + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": self.__class__.__name__, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): + return input_data + + def gds_validate_string(self, input_data, node=None, input_name=""): + if not input_data: + return "" + else: + return input_data + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def gds_validate_base64(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of integer values") + return values + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], 
BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of float values") + return values + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: %s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, "Requires sequence of double or float values" + ) + return values + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + input_data = input_data.strip() + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) + return input_data + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, 
input_name=""): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) + return values + + def gds_validate_datetime(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_datetime(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split(".") + if len(time_parts) > 1: + micro_seconds = int(float("0." 
+ time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt + + def gds_validate_date(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + except AttributeError: + pass + return _svalue + + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") + dt = dt.replace(tzinfo=tz) + return dt.date() + + def gds_validate_time(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_time(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%02d:%02d:%02d" % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%02d:%02d:%02d.%s" % ( + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
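            # Worked illustration (made-up patterns): with
            #     patterns = [["[0-9]+", "[A-Z]+"], [".{3}"]]
            # the target must fully match at least one pattern from every inner
            # list, so "ABC" and "123" pass while "ABCD" fails (it matches the
            # first group but is longer than ".{3}" allows).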
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_str_lower(self, instring): + return instring.lower() + + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = "/".join(path_list) + return path + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) + if classname is not None: + names = classname.split(":") + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
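            # Illustration: for a node parsed from '<Notes lang="en">fragile</Notes>'
            # this returns the serialized markup itself, i.e. the string
            # '<Notes lang="en">fragile</Notes>'.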
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = "utf-8" + return instring.encode(encoding) + else: + return instring + + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode("utf8") + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + + if type(self) != type(other): + return False + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + + def __ne__(self, other): + return not self.__eq__(other) + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + + def getSubclassFromModule_(module, class_): + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(" ") + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+    if not inStr:
+        return ""
+    s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr
+    s2 = ""
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos : mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start() : mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace("&", "&amp;")
+    s1 = s1.replace("<", "&lt;")
+    s1 = s1.replace(">", "&gt;")
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr
+    s1 = s1.replace("&", "&amp;")
+    s1 = s1.replace("<", "&lt;")
+    s1 = s1.replace(">", "&gt;")
+    s1 = s1.replace("\n", "&#10;")
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find("\n") == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find("\n") == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ""
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(":")
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == "xml":
+            namespace = "http://www.w3.org/XML/1998/namespace"
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get(
+                "{%s}%s"
+                % (
+                    namespace,
+                    name,
+                )
+            )
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = "%s (element %s/line %d)" % (
+            msg,
+            node.tag,
+            node.sourceline,
+        )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+
+    def getCategory(self):
+        return self.category
+
+    def getContenttype(self, content_type):
+        return self.content_type
+
+    def getValue(self):
+        return self.value
+
+    def getName(self):
+        return self.name
+
+    def export(self, outfile, level, name, namespace, pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write("<%s>%g" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement(element, "%s" % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = "%g" % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = "%s" % base64.b64encode(self.value) + return text + + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(")\n") + + +class MemberSpec_(object): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + + def set_name(self, name): + self.name = name + + def 
get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return "xs:string" + else: + return self.data_type + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Data representation classes. +# + + +class EShipper(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + username=None, + password=None, + version=None, + ShipmentCancelRequest=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.username = _cast(None, username) + self.username_nsprefix_ = None + self.password = _cast(None, password) + self.password_nsprefix_ = None + self.version = _cast(None, version) + self.version_nsprefix_ = None + self.ShipmentCancelRequest = ShipmentCancelRequest + self.ShipmentCancelRequest_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, EShipper) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EShipper.subclass: + return EShipper.subclass(*args_, **kwargs_) + else: + return EShipper(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_ShipmentCancelRequest(self): + return self.ShipmentCancelRequest + + def set_ShipmentCancelRequest(self, ShipmentCancelRequest): + self.ShipmentCancelRequest = ShipmentCancelRequest + + def get_username(self): + return self.username + + def set_username(self, username): + self.username = username + + def get_password(self): + return self.password + + def set_password(self, password): + self.password = password + + def get_version(self): + return self.version + + def set_version(self, version): + self.version = version + + def _hasContent(self): + if self.ShipmentCancelRequest is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EShipper") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EShipper": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + 
self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="EShipper" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EShipper", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="EShipper" + ): + if self.username is not None and "username" not in already_processed: + already_processed.add("username") + outfile.write( + " username=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.username), input_name="username" + ) + ), + ) + ) + if self.password is not None and "password" not in already_processed: + already_processed.add("password") + outfile.write( + " password=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.password), input_name="password" + ) + ), + ) + ) + if self.version is not None and "version" not in already_processed: + already_processed.add("version") + outfile.write( + " version=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.version), input_name="version" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.ShipmentCancelRequest is not None: + namespaceprefix_ = ( + self.ShipmentCancelRequest_nsprefix_ + ":" + if (UseCapturedNS_ and self.ShipmentCancelRequest_nsprefix_) + else "" + ) + self.ShipmentCancelRequest.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ShipmentCancelRequest", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("username", node) + if value is not None and "username" not in already_processed: + already_processed.add("username") + self.username = value + value = find_attr_value_("password", node) + if value is not None and "password" not in already_processed: + already_processed.add("password") + self.password = value + value = find_attr_value_("version", node) + if value is not None and "version" not in already_processed: + already_processed.add("version") + self.version = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "ShipmentCancelRequest": + obj_ = ShipmentCancelRequestType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShipmentCancelRequest = obj_ + obj_.original_tagname_ = "ShipmentCancelRequest" + + +# end class EShipper + + +class ShipmentCancelRequestType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, Order=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = 
None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.Order = Order + self.Order_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShipmentCancelRequestType + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShipmentCancelRequestType.subclass: + return ShipmentCancelRequestType.subclass(*args_, **kwargs_) + else: + return ShipmentCancelRequestType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Order(self): + return self.Order + + def set_Order(self, Order): + self.Order = Order + + def _hasContent(self): + if self.Order is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShipmentCancelRequestType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ShipmentCancelRequestType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ShipmentCancelRequestType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ShipmentCancelRequestType", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ShipmentCancelRequestType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ShipmentCancelRequestType", + ): + pass + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShipmentCancelRequestType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.Order is not None: + namespaceprefix_ = ( + self.Order_nsprefix_ + ":" + if (UseCapturedNS_ and self.Order_nsprefix_) + else "" + ) + self.Order.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Order", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + pass + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Order": + obj_ = OrderType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Order = 
obj_ + obj_.original_tagname_ = "Order" + + +# end class ShipmentCancelRequestType + + +class OrderType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, orderId=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.orderId = _cast(int, orderId) + self.orderId_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, OrderType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if OrderType.subclass: + return OrderType.subclass(*args_, **kwargs_) + else: + return OrderType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_orderId(self): + return self.orderId + + def set_orderId(self, orderId): + self.orderId = orderId + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OrderType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("OrderType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "OrderType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="OrderType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="OrderType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="OrderType" + ): + if self.orderId is not None and "orderId" not in already_processed: + already_processed.add("orderId") + outfile.write( + ' orderId="%s"' + % self.gds_format_integer(self.orderId, input_name="orderId") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OrderType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, 
attrs, already_processed): + value = find_attr_value_("orderId", node) + if value is not None and "orderId" not in already_processed: + already_processed.add("orderId") + self.orderId = self.gds_parse_integer(value, node, "orderId") + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class OrderType + + +GDSClassesMapping = {} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + """ + parser = None + rootNode = parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write("#from shipment_cancel_request import *\n\n") + sys.stdout.write("import shipment_cancel_request as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == "__main__": + # import pdb; pdb.set_trace() + main() + +RenameMappings_ = {} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {"http://www.eshipper.net/XMLSchema": []} + +__all__ = ["EShipper", "OrderType", "ShipmentCancelRequestType"] diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_reply.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_reply.py new file mode 100644 index 0000000000..8fecf7be62 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_reply.py @@ -0,0 +1,3836 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Mon Oct 24 11:17:21 2022 by generateDS.py version 2.41.1. +# Python 3.10.6 (main, Aug 30 2022, 05:12:36) [Clang 13.1.6 (clang-1316.0.21.2.5)] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio.schemas.eshipper_xml/shipping_reply.py') +# +# Command line arguments: +# ./vendor/schemas/shipping_reply.xsd +# +# Command line: +# /Users/danielkobina/Workspace/project/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio.schemas.eshipper_xml/shipping_reply.py" ./vendor/schemas/shipping_reply.xsd +# +# Current working directory (os.getcwd()): +# eshipper_xml +# + +import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return None + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": self.__class__.__name__, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): + return input_data + + def gds_validate_string(self, input_data, node=None, input_name=""): + if not input_data: + return "" + else: + return input_data + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def gds_validate_base64(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of integer values") + return values + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], 
BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of float values") + return values + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: %s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, "Requires sequence of double or float values" + ) + return values + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + input_data = input_data.strip() + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) + return input_data + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, 
input_name=""): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) + return values + + def gds_validate_datetime(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_datetime(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split(".") + if len(time_parts) > 1: + micro_seconds = int(float("0." 
+ time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt + + def gds_validate_date(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + except AttributeError: + pass + return _svalue + + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") + dt = dt.replace(tzinfo=tz) + return dt.date() + + def gds_validate_time(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_time(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%02d:%02d:%02d" % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%02d:%02d:%02d.%s" % ( + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_str_lower(self, instring): + return instring.lower() + + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = "/".join(path_list) + return path + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) + if classname is not None: + names = classname.split(":") + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = "utf-8" + return instring.encode(encoding) + else: + return instring + + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode("utf8") + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + + if type(self) != type(other): + return False + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + + def __ne__(self, other): + return not self.__eq__(other) + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + + def getSubclassFromModule_(module, class_): + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(" ") + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+    if not inStr:
+        return ""
+    s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr
+    s2 = ""
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos : mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start() : mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace("&", "&amp;")
+    s1 = s1.replace("<", "&lt;")
+    s1 = s1.replace(">", "&gt;")
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr
+    s1 = s1.replace("&", "&amp;")
+    s1 = s1.replace("<", "&lt;")
+    s1 = s1.replace(">", "&gt;")
+    s1 = s1.replace("\n", "&#10;")
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find("\n") == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find("\n") == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ""
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(":")
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == "xml":
+            namespace = "http://www.w3.org/XML/1998/namespace"
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get(
+                "{%s}%s"
+                % (
+                    namespace,
+                    name,
+                )
+            )
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = "%s (element %s/line %d)" % (
+            msg,
+            node.tag,
+            node.sourceline,
+        )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+
+    def getCategory(self):
+        return self.category
+
+    def getContenttype(self, content_type):
+        return self.content_type
+
+    def getValue(self):
+        return self.value
+
+    def getName(self):
+        return self.name
+
+    def export(self, outfile, level, name, namespace, pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write("<%s>%g" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement(element, "%s" % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = "%g" % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = "%s" % base64.b64encode(self.value) + return text + + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(")\n") + + +class MemberSpec_(object): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + + def set_name(self, name): + self.name = name + + def 
get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return "xs:string" + else: + return self.data_type + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Data representation classes. +# + + +class EShipper(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, version=None, ShippingReply=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.version = _cast(None, version) + self.version_nsprefix_ = None + self.ShippingReply = ShippingReply + self.ShippingReply_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, EShipper) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EShipper.subclass: + return EShipper.subclass(*args_, **kwargs_) + else: + return EShipper(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_ShippingReply(self): + return self.ShippingReply + + def set_ShippingReply(self, ShippingReply): + self.ShippingReply = ShippingReply + + def get_version(self): + return self.version + + def set_version(self, version): + self.version = version + + def _hasContent(self): + if self.ShippingReply is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EShipper") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EShipper": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="EShipper" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EShipper", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, 
namespaceprefix_="", name_="EShipper" + ): + if self.version is not None and "version" not in already_processed: + already_processed.add("version") + outfile.write( + " version=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.version), input_name="version" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.ShippingReply is not None: + namespaceprefix_ = ( + self.ShippingReply_nsprefix_ + ":" + if (UseCapturedNS_ and self.ShippingReply_nsprefix_) + else "" + ) + self.ShippingReply.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ShippingReply", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("version", node) + if value is not None and "version" not in already_processed: + already_processed.add("version") + self.version = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "ShippingReply": + obj_ = ShippingReplyType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingReply = obj_ + obj_.original_tagname_ = "ShippingReply" + + +# end class EShipper + + +class ShippingReplyType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + Order=None, + Carrier=None, + Reference=None, + Package=None, + Pickup=None, + TrackingURL=None, + Labels=None, + LabelData=None, + CustomsInvoice=None, + Quote=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.Order = Order + self.Order_nsprefix_ = None + self.Carrier = Carrier + self.Carrier_nsprefix_ = None + self.Reference = Reference + self.Reference_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.Pickup = Pickup + self.Pickup_nsprefix_ = None + self.TrackingURL = TrackingURL + self.TrackingURL_nsprefix_ = None + self.Labels = Labels + self.Labels_nsprefix_ = None + self.LabelData = LabelData + self.LabelData_nsprefix_ = None + self.CustomsInvoice = CustomsInvoice + self.CustomsInvoice_nsprefix_ = None + self.Quote = Quote + self.Quote_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ShippingReplyType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingReplyType.subclass: + return ShippingReplyType.subclass(*args_, **kwargs_) + else: + return ShippingReplyType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = 
ns_prefix + + def get_Order(self): + return self.Order + + def set_Order(self, Order): + self.Order = Order + + def get_Carrier(self): + return self.Carrier + + def set_Carrier(self, Carrier): + self.Carrier = Carrier + + def get_Reference(self): + return self.Reference + + def set_Reference(self, Reference): + self.Reference = Reference + + def get_Package(self): + return self.Package + + def set_Package(self, Package): + self.Package = Package + + def add_Package(self, value): + self.Package.append(value) + + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + + def replace_Package_at(self, index, value): + self.Package[index] = value + + def get_Pickup(self): + return self.Pickup + + def set_Pickup(self, Pickup): + self.Pickup = Pickup + + def get_TrackingURL(self): + return self.TrackingURL + + def set_TrackingURL(self, TrackingURL): + self.TrackingURL = TrackingURL + + def get_Labels(self): + return self.Labels + + def set_Labels(self, Labels): + self.Labels = Labels + + def get_LabelData(self): + return self.LabelData + + def set_LabelData(self, LabelData): + self.LabelData = LabelData + + def get_CustomsInvoice(self): + return self.CustomsInvoice + + def set_CustomsInvoice(self, CustomsInvoice): + self.CustomsInvoice = CustomsInvoice + + def get_Quote(self): + return self.Quote + + def set_Quote(self, Quote): + self.Quote = Quote + + def _hasContent(self): + if ( + self.Order is not None + or self.Carrier is not None + or self.Reference is not None + or self.Package + or self.Pickup is not None + or self.TrackingURL is not None + or self.Labels is not None + or self.LabelData is not None + or self.CustomsInvoice is not None + or self.Quote is not None + ): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShippingReplyType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ShippingReplyType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ShippingReplyType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ShippingReplyType", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ShippingReplyType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ShippingReplyType", + ): + pass + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShippingReplyType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.Order is not None: + namespaceprefix_ = ( + self.Order_nsprefix_ + ":" + if (UseCapturedNS_ and self.Order_nsprefix_) + else "" + ) + self.Order.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Order", + 
pretty_print=pretty_print, + ) + if self.Carrier is not None: + namespaceprefix_ = ( + self.Carrier_nsprefix_ + ":" + if (UseCapturedNS_ and self.Carrier_nsprefix_) + else "" + ) + self.Carrier.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Carrier", + pretty_print=pretty_print, + ) + if self.Reference is not None: + namespaceprefix_ = ( + self.Reference_nsprefix_ + ":" + if (UseCapturedNS_ and self.Reference_nsprefix_) + else "" + ) + self.Reference.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Reference", + pretty_print=pretty_print, + ) + for Package_ in self.Package: + namespaceprefix_ = ( + self.Package_nsprefix_ + ":" + if (UseCapturedNS_ and self.Package_nsprefix_) + else "" + ) + Package_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Package", + pretty_print=pretty_print, + ) + if self.Pickup is not None: + namespaceprefix_ = ( + self.Pickup_nsprefix_ + ":" + if (UseCapturedNS_ and self.Pickup_nsprefix_) + else "" + ) + self.Pickup.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Pickup", + pretty_print=pretty_print, + ) + if self.TrackingURL is not None: + namespaceprefix_ = ( + self.TrackingURL_nsprefix_ + ":" + if (UseCapturedNS_ and self.TrackingURL_nsprefix_) + else "" + ) + showIndent(outfile, level, pretty_print) + outfile.write( + "<%sTrackingURL>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.TrackingURL), input_name="TrackingURL" + ) + ), + namespaceprefix_, + eol_, + ) + ) + if self.Labels is not None: + namespaceprefix_ = ( + self.Labels_nsprefix_ + ":" + if (UseCapturedNS_ and self.Labels_nsprefix_) + else "" + ) + showIndent(outfile, level, pretty_print) + outfile.write( + "<%sLabels>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.Labels), input_name="Labels" + ) + ), + namespaceprefix_, + eol_, + ) + ) + if self.LabelData is not None: + namespaceprefix_ = ( + self.LabelData_nsprefix_ + ":" + if (UseCapturedNS_ and self.LabelData_nsprefix_) + else "" + ) + self.LabelData.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="LabelData", + pretty_print=pretty_print, + ) + if self.CustomsInvoice is not None: + namespaceprefix_ = ( + self.CustomsInvoice_nsprefix_ + ":" + if (UseCapturedNS_ and self.CustomsInvoice_nsprefix_) + else "" + ) + showIndent(outfile, level, pretty_print) + outfile.write( + "<%sCustomsInvoice>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.CustomsInvoice), input_name="CustomsInvoice" + ) + ), + namespaceprefix_, + eol_, + ) + ) + if self.Quote is not None: + namespaceprefix_ = ( + self.Quote_nsprefix_ + ":" + if (UseCapturedNS_ and self.Quote_nsprefix_) + else "" + ) + self.Quote.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Quote", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + pass + + def _buildChildren( + self, child_, node, nodeName_, 
fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Order": + obj_ = OrderType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Order = obj_ + obj_.original_tagname_ = "Order" + elif nodeName_ == "Carrier": + obj_ = CarrierType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Carrier = obj_ + obj_.original_tagname_ = "Carrier" + elif nodeName_ == "Reference": + obj_ = ReferenceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Reference = obj_ + obj_.original_tagname_ = "Reference" + elif nodeName_ == "Package": + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = "Package" + elif nodeName_ == "Pickup": + obj_ = PickupType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Pickup = obj_ + obj_.original_tagname_ = "Pickup" + elif nodeName_ == "TrackingURL": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "TrackingURL") + value_ = self.gds_validate_string(value_, node, "TrackingURL") + self.TrackingURL = value_ + self.TrackingURL_nsprefix_ = child_.prefix + elif nodeName_ == "Labels": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "Labels") + value_ = self.gds_validate_string(value_, node, "Labels") + self.Labels = value_ + self.Labels_nsprefix_ = child_.prefix + elif nodeName_ == "LabelData": + obj_ = LabelDataType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.LabelData = obj_ + obj_.original_tagname_ = "LabelData" + elif nodeName_ == "CustomsInvoice": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "CustomsInvoice") + value_ = self.gds_validate_string(value_, node, "CustomsInvoice") + self.CustomsInvoice = value_ + self.CustomsInvoice_nsprefix_ = child_.prefix + elif nodeName_ == "Quote": + obj_ = QuoteType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Quote = obj_ + obj_.original_tagname_ = "Quote" + + +# end class ShippingReplyType + + +class OrderType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, id=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, OrderType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if OrderType.subclass: + return OrderType.subclass(*args_, **kwargs_) + else: + return OrderType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + 
namespacedef_="", + name_="OrderType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("OrderType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "OrderType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="OrderType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="OrderType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="OrderType" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OrderType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class OrderType + + +class CarrierType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + carrierName=None, + serviceName=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.carrierName = _cast(None, carrierName) + self.carrierName_nsprefix_ = None + self.serviceName = _cast(None, serviceName) + self.serviceName_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, CarrierType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierType.subclass: + return CarrierType.subclass(*args_, **kwargs_) + else: + return CarrierType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_carrierName(self): + return 
self.carrierName + + def set_carrierName(self, carrierName): + self.carrierName = carrierName + + def get_serviceName(self): + return self.serviceName + + def set_serviceName(self, serviceName): + self.serviceName = serviceName + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CarrierType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CarrierType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CarrierType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CarrierType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CarrierType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CarrierType", + ): + if self.carrierName is not None and "carrierName" not in already_processed: + already_processed.add("carrierName") + outfile.write( + " carrierName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.carrierName), input_name="carrierName" + ) + ), + ) + ) + if self.serviceName is not None and "serviceName" not in already_processed: + already_processed.add("serviceName") + outfile.write( + " serviceName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.serviceName), input_name="serviceName" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CarrierType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("carrierName", node) + if value is not None and "carrierName" not in already_processed: + already_processed.add("carrierName") + self.carrierName = value + value = find_attr_value_("serviceName", node) + if value is not None and "serviceName" not in already_processed: + already_processed.add("serviceName") + self.serviceName = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class CarrierType + + +class 
ReferenceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, code=None, name=None, valueOf_=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.code = _cast(None, code) + self.code_nsprefix_ = None + self.name = _cast(None, name) + self.name_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ReferenceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ReferenceType.subclass: + return ReferenceType.subclass(*args_, **kwargs_) + else: + return ReferenceType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_code(self): + return self.code + + def set_code(self, code): + self.code = code + + def get_name(self): + return self.name + + def set_name(self, name): + self.name = name + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReferenceType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ReferenceType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ReferenceType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ReferenceType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ReferenceType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ReferenceType", + ): + if self.code is not None and "code" not in already_processed: + already_processed.add("code") + outfile.write( + " code=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.code), input_name="code" + ) + ), + ) + ) + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReferenceType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + 
self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("code", node) + if value is not None and "code" not in already_processed: + already_processed.add("code") + self.code = value + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") + self.name = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ReferenceType + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, trackingNumber=None, valueOf_=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.trackingNumber = _cast(None, trackingNumber) + self.trackingNumber_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_trackingNumber(self): + return self.trackingNumber + + def set_trackingNumber(self, trackingNumber): + self.trackingNumber = trackingNumber + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackageType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PackageType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PackageType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PackageType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PackageType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PackageType", 
+ ): + if ( + self.trackingNumber is not None + and "trackingNumber" not in already_processed + ): + already_processed.add("trackingNumber") + outfile.write( + " trackingNumber=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.trackingNumber), + input_name="trackingNumber", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackageType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("trackingNumber", node) + if value is not None and "trackingNumber" not in already_processed: + already_processed.add("trackingNumber") + self.trackingNumber = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class PackageType + + +class PickupType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, confirmationNumber=None, valueOf_=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.confirmationNumber = _cast(None, confirmationNumber) + self.confirmationNumber_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PickupType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PickupType.subclass: + return PickupType.subclass(*args_, **kwargs_) + else: + return PickupType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_confirmationNumber(self): + return self.confirmationNumber + + def set_confirmationNumber(self, confirmationNumber): + self.confirmationNumber = confirmationNumber + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PickupType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PickupType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PickupType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, 
level, already_processed, namespaceprefix_, name_="PickupType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PickupType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="PickupType" + ): + if ( + self.confirmationNumber is not None + and "confirmationNumber" not in already_processed + ): + already_processed.add("confirmationNumber") + outfile.write( + " confirmationNumber=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.confirmationNumber), + input_name="confirmationNumber", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PickupType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("confirmationNumber", node) + if value is not None and "confirmationNumber" not in already_processed: + already_processed.add("confirmationNumber") + self.confirmationNumber = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class PickupType + + +class LabelDataType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, Label=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + if Label is None: + self.Label = [] + else: + self.Label = Label + self.Label_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, LabelDataType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LabelDataType.subclass: + return LabelDataType.subclass(*args_, **kwargs_) + else: + return LabelDataType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Label(self): + return self.Label + + def set_Label(self, Label): + self.Label = Label + + def add_Label(self, value): + self.Label.append(value) + + def insert_Label_at(self, index, value): + self.Label.insert(index, value) + + def replace_Label_at(self, index, value): + self.Label[index] = value + + def _hasContent(self): + if self.Label: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LabelDataType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("LabelDataType") + if imported_ns_def_ is not None: + namespacedef_ = 
imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "LabelDataType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="LabelDataType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="LabelDataType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="LabelDataType", + ): + pass + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LabelDataType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + for Label_ in self.Label: + namespaceprefix_ = ( + self.Label_nsprefix_ + ":" + if (UseCapturedNS_ and self.Label_nsprefix_) + else "" + ) + Label_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Label", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + pass + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Label": + obj_ = LabelType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Label.append(obj_) + obj_.original_tagname_ = "Label" + + +# end class LabelDataType + + +class LabelType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, Type=None, Data=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.Type = Type + self.Type_nsprefix_ = None + self.Data = Data + self.Data_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, LabelType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LabelType.subclass: + return LabelType.subclass(*args_, **kwargs_) + else: + return LabelType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Type(self): + return self.Type + + def set_Type(self, Type): + self.Type = Type + + def get_Data(self): + return self.Data + + def set_Data(self, Data): + self.Data = Data + + def _hasContent(self): + if self.Type is 
not None or self.Data is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LabelType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("LabelType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "LabelType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="LabelType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="LabelType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="LabelType" + ): + pass + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LabelType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.Type is not None: + namespaceprefix_ = ( + self.Type_nsprefix_ + ":" + if (UseCapturedNS_ and self.Type_nsprefix_) + else "" + ) + showIndent(outfile, level, pretty_print) + outfile.write( + "<%sType>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string(quote_xml(self.Type), input_name="Type") + ), + namespaceprefix_, + eol_, + ) + ) + if self.Data is not None: + namespaceprefix_ = ( + self.Data_nsprefix_ + ":" + if (UseCapturedNS_ and self.Data_nsprefix_) + else "" + ) + showIndent(outfile, level, pretty_print) + outfile.write( + "<%sData>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string(quote_xml(self.Data), input_name="Data") + ), + namespaceprefix_, + eol_, + ) + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + pass + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Type": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "Type") + value_ = self.gds_validate_string(value_, node, "Type") + self.Type = value_ + self.Type_nsprefix_ = child_.prefix + elif nodeName_ == "Data": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "Data") + value_ = self.gds_validate_string(value_, node, "Data") + self.Data = value_ + self.Data_nsprefix_ = child_.prefix + + +# end class LabelType + + +class QuoteType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + carrierId=None, + carrierName=None, + 
serviceId=None, + serviceName=None, + modeTransport=None, + transitDays=None, + baseCharge=None, + fuelSurcharge=None, + totalCharge=None, + currency=None, + Surcharge=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.carrierId = _cast(int, carrierId) + self.carrierId_nsprefix_ = None + self.carrierName = _cast(None, carrierName) + self.carrierName_nsprefix_ = None + self.serviceId = _cast(int, serviceId) + self.serviceId_nsprefix_ = None + self.serviceName = _cast(None, serviceName) + self.serviceName_nsprefix_ = None + self.modeTransport = _cast(None, modeTransport) + self.modeTransport_nsprefix_ = None + self.transitDays = _cast(int, transitDays) + self.transitDays_nsprefix_ = None + self.baseCharge = _cast(float, baseCharge) + self.baseCharge_nsprefix_ = None + self.fuelSurcharge = _cast(float, fuelSurcharge) + self.fuelSurcharge_nsprefix_ = None + self.totalCharge = _cast(float, totalCharge) + self.totalCharge_nsprefix_ = None + self.currency = _cast(None, currency) + self.currency_nsprefix_ = None + if Surcharge is None: + self.Surcharge = [] + else: + self.Surcharge = Surcharge + self.Surcharge_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, QuoteType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if QuoteType.subclass: + return QuoteType.subclass(*args_, **kwargs_) + else: + return QuoteType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Surcharge(self): + return self.Surcharge + + def set_Surcharge(self, Surcharge): + self.Surcharge = Surcharge + + def add_Surcharge(self, value): + self.Surcharge.append(value) + + def insert_Surcharge_at(self, index, value): + self.Surcharge.insert(index, value) + + def replace_Surcharge_at(self, index, value): + self.Surcharge[index] = value + + def get_carrierId(self): + return self.carrierId + + def set_carrierId(self, carrierId): + self.carrierId = carrierId + + def get_carrierName(self): + return self.carrierName + + def set_carrierName(self, carrierName): + self.carrierName = carrierName + + def get_serviceId(self): + return self.serviceId + + def set_serviceId(self, serviceId): + self.serviceId = serviceId + + def get_serviceName(self): + return self.serviceName + + def set_serviceName(self, serviceName): + self.serviceName = serviceName + + def get_modeTransport(self): + return self.modeTransport + + def set_modeTransport(self, modeTransport): + self.modeTransport = modeTransport + + def get_transitDays(self): + return self.transitDays + + def set_transitDays(self, transitDays): + self.transitDays = transitDays + + def get_baseCharge(self): + return self.baseCharge + + def set_baseCharge(self, baseCharge): + self.baseCharge = baseCharge + + def get_fuelSurcharge(self): + return self.fuelSurcharge + + def set_fuelSurcharge(self, fuelSurcharge): + self.fuelSurcharge = fuelSurcharge + + def get_totalCharge(self): + return self.totalCharge + + def set_totalCharge(self, totalCharge): + self.totalCharge = totalCharge + + def get_currency(self): + return self.currency + + def set_currency(self, currency): + self.currency = currency + + def _hasContent(self): + if self.Surcharge: + return True + 
else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("QuoteType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "QuoteType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="QuoteType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="QuoteType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="QuoteType" + ): + if self.carrierId is not None and "carrierId" not in already_processed: + already_processed.add("carrierId") + outfile.write( + ' carrierId="%s"' + % self.gds_format_integer(self.carrierId, input_name="carrierId") + ) + if self.carrierName is not None and "carrierName" not in already_processed: + already_processed.add("carrierName") + outfile.write( + " carrierName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.carrierName), input_name="carrierName" + ) + ), + ) + ) + if self.serviceId is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + outfile.write( + ' serviceId="%s"' + % self.gds_format_integer(self.serviceId, input_name="serviceId") + ) + if self.serviceName is not None and "serviceName" not in already_processed: + already_processed.add("serviceName") + outfile.write( + " serviceName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.serviceName), input_name="serviceName" + ) + ), + ) + ) + if self.modeTransport is not None and "modeTransport" not in already_processed: + already_processed.add("modeTransport") + outfile.write( + " modeTransport=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.modeTransport), input_name="modeTransport" + ) + ), + ) + ) + if self.transitDays is not None and "transitDays" not in already_processed: + already_processed.add("transitDays") + outfile.write( + ' transitDays="%s"' + % self.gds_format_integer(self.transitDays, input_name="transitDays") + ) + if self.baseCharge is not None and "baseCharge" not in already_processed: + already_processed.add("baseCharge") + outfile.write( + ' baseCharge="%s"' + % self.gds_format_float(self.baseCharge, input_name="baseCharge") + ) + if self.fuelSurcharge is not None and "fuelSurcharge" not in already_processed: + already_processed.add("fuelSurcharge") + outfile.write( + ' fuelSurcharge="%s"' + % self.gds_format_float(self.fuelSurcharge, input_name="fuelSurcharge") + ) + if self.totalCharge is not None and "totalCharge" not in already_processed: + already_processed.add("totalCharge") + outfile.write( + ' totalCharge="%s"' + % self.gds_format_float(self.totalCharge, input_name="totalCharge") + ) + if self.currency is not None and "currency" not in 
already_processed: + already_processed.add("currency") + outfile.write( + " currency=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.currency), input_name="currency" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="QuoteType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + for Surcharge_ in self.Surcharge: + namespaceprefix_ = ( + self.Surcharge_nsprefix_ + ":" + if (UseCapturedNS_ and self.Surcharge_nsprefix_) + else "" + ) + Surcharge_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Surcharge", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("carrierId", node) + if value is not None and "carrierId" not in already_processed: + already_processed.add("carrierId") + self.carrierId = self.gds_parse_integer(value, node, "carrierId") + value = find_attr_value_("carrierName", node) + if value is not None and "carrierName" not in already_processed: + already_processed.add("carrierName") + self.carrierName = value + value = find_attr_value_("serviceId", node) + if value is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + self.serviceId = self.gds_parse_integer(value, node, "serviceId") + value = find_attr_value_("serviceName", node) + if value is not None and "serviceName" not in already_processed: + already_processed.add("serviceName") + self.serviceName = value + value = find_attr_value_("modeTransport", node) + if value is not None and "modeTransport" not in already_processed: + already_processed.add("modeTransport") + self.modeTransport = value + value = find_attr_value_("transitDays", node) + if value is not None and "transitDays" not in already_processed: + already_processed.add("transitDays") + self.transitDays = self.gds_parse_integer(value, node, "transitDays") + value = find_attr_value_("baseCharge", node) + if value is not None and "baseCharge" not in already_processed: + already_processed.add("baseCharge") + value = self.gds_parse_float(value, node, "baseCharge") + self.baseCharge = value + value = find_attr_value_("fuelSurcharge", node) + if value is not None and "fuelSurcharge" not in already_processed: + already_processed.add("fuelSurcharge") + value = self.gds_parse_float(value, node, "fuelSurcharge") + self.fuelSurcharge = value + value = find_attr_value_("totalCharge", node) + if value is not None and "totalCharge" not in already_processed: + already_processed.add("totalCharge") + value = self.gds_parse_float(value, node, "totalCharge") + self.totalCharge = value + value = find_attr_value_("currency", node) + if value is not None and "currency" not in already_processed: + already_processed.add("currency") + self.currency = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Surcharge": + obj_ = SurchargeType.factory(parent_object_=self) + obj_.build(child_, 
gds_collector_=gds_collector_) + self.Surcharge.append(obj_) + obj_.original_tagname_ = "Surcharge" + + +# end class QuoteType + + +class SurchargeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + id=None, + name=None, + amount=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.name = _cast(None, name) + self.name_nsprefix_ = None + self.amount = _cast(float, amount) + self.amount_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, SurchargeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SurchargeType.subclass: + return SurchargeType.subclass(*args_, **kwargs_) + else: + return SurchargeType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_name(self): + return self.name + + def set_name(self, name): + self.name = name + + def get_amount(self): + return self.amount + + def set_amount(self, amount): + self.amount = amount + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SurchargeType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SurchargeType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SurchargeType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SurchargeType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SurchargeType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SurchargeType", + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.amount 
is not None and "amount" not in already_processed: + already_processed.add("amount") + outfile.write( + ' amount="%s"' % self.gds_format_float(self.amount, input_name="amount") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SurchargeType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") + self.name = value + value = find_attr_value_("amount", node) + if value is not None and "amount" not in already_processed: + already_processed.add("amount") + value = self.gds_parse_float(value, node, "amount") + self.amount = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class SurchargeType + + +GDSClassesMapping = {} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ """ + parser = None + rootNode = parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write("#from shipping_reply import *\n\n") + sys.stdout.write("import shipping_reply as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == "__main__": + # import pdb; pdb.set_trace() + main() + +RenameMappings_ = {} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {"http://www.eshipper.net/xml/XMLSchema": []} + +__all__ = [ + "CarrierType", + "EShipper", + "LabelDataType", + "LabelType", + "OrderType", + "PackageType", + "PickupType", + "QuoteType", + "ReferenceType", + "ShippingReplyType", + "SurchargeType", +] diff --git a/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_request.py b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_request.py new file mode 100644 index 0000000000..3f7dabf480 --- /dev/null +++ b/modules/connectors/eshipper_xml/karrio/schemas/eshipper_xml/shipping_request.py @@ -0,0 +1,7896 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Mon Oct 24 11:17:21 2022 by generateDS.py version 2.41.1. 
+# Python 3.10.6 (main, Aug 30 2022, 05:12:36) [Clang 13.1.6 (clang-1316.0.21.2.5)] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio.schemas.eshipper_xml/shipping_request.py') +# +# Command line arguments: +# ./vendor/schemas/shipping_request.xsd +# +# Command line: +# /Users/danielkobina/Workspace/project/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio.schemas.eshipper_xml/shipping_request.py" ./vendor/schemas/shipping_request.xsd +# +# Current working directory (os.getcwd()): +# eshipper_xml +# + +import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return None + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": self.__class__.__name__, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): + return input_data + + def gds_validate_string(self, input_data, node=None, input_name=""): + if not input_data: + return "" + else: + return input_data + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def 
gds_validate_base64(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of integer values") + return values + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of float values") + return values + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." 
in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: %s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, "Requires sequence of double or float values" + ) + return values + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + input_data = input_data.strip() + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) + return input_data + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) + return values + + def gds_validate_datetime(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_datetime(self, input_data, input_name=""): + if 
input_data.microsecond == 0: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split(".") + if len(time_parts) > 1: + micro_seconds = int(float("0." + time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt + + def gds_validate_date(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + except AttributeError: + pass + return _svalue + + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") + dt = dt.replace(tzinfo=tz) + return dt.date() + + def gds_validate_time(self, input_data, node=None, input_name=""): + return input_data + + def gds_format_time(self, input_data, input_name=""): + if input_data.microsecond == 0: + _svalue = "%02d:%02d:%02d" % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = "%02d:%02d:%02d.%s" % ( + input_data.hour, + input_data.minute, + input_data.second, + 
("%f" % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += "Z" + else: + if total_seconds < 0: + _svalue += "-" + total_seconds *= -1 + else: + _svalue += "+" + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) + return _svalue + + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(":") + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == "-": + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") + else: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") + dt = dt.replace(tzinfo=tz) + return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_str_lower(self, instring): + return instring.lower() + + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = "/".join(path_list) + return path + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + + def get_path_list_(self, node, path_list): + if node is None: + return + tag = 
GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) + if classname is not None: + names = classname.split(":") + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = "utf-8" + return instring.encode(encoding) + else: + return instring + + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode("utf8") + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + + if type(self) != type(other): + return False + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + + def __ne__(self, other): + return not self.__eq__(other) + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + + def getSubclassFromModule_(module, class_): + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(" ") + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return "" + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s2 = "" + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos : mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start() : mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + return s1 + + +def quote_attrib(inStr): + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s1 = s1.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") + s1 = s1.replace("\n", " ") + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find("\n") == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find("\n") == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = "" + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(":") + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == "xml": + namespace = "http://www.w3.org/XML/1998/namespace" + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get( + "{%s}%s" + % ( + namespace, + name, + ) + ) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = "%s (element %s/line %d)" % ( + msg, + node.tag, + node.sourceline, + ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + + def getCategory(self): + return self.category + + def getContenttype(self, content_type): + return self.content_type + + def getValue(self): + return self.value + + def getName(self): + return self.name + + def export(self, outfile, level, name, namespace, pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write("<%s>%g" % (self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement(element, "%s" % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = "%g" % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = "%s" % base64.b64encode(self.value) + return text + + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(")\n") + + +class MemberSpec_(object): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + + def set_name(self, name): + self.name = name + + def 
get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return "xs:string" + else: + return self.data_type + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Data representation classes. +# + + +class EShipper(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + username=None, + password=None, + version=None, + ShippingRequest=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.username = _cast(None, username) + self.username_nsprefix_ = None + self.password = _cast(None, password) + self.password_nsprefix_ = None + self.version = _cast(None, version) + self.version_nsprefix_ = None + self.ShippingRequest = ShippingRequest + self.ShippingRequest_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, EShipper) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EShipper.subclass: + return EShipper.subclass(*args_, **kwargs_) + else: + return EShipper(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_ShippingRequest(self): + return self.ShippingRequest + + def set_ShippingRequest(self, ShippingRequest): + self.ShippingRequest = ShippingRequest + + def get_username(self): + return self.username + + def set_username(self, username): + self.username = username + + def get_password(self): + return self.password + + def set_password(self, password): + self.password = password + + def get_version(self): + return self.version + + def set_version(self, version): + self.version = version + + def _hasContent(self): + if self.ShippingRequest is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EShipper") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EShipper": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, 
namespaceprefix_, name_="EShipper" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EShipper", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="EShipper" + ): + if self.username is not None and "username" not in already_processed: + already_processed.add("username") + outfile.write( + " username=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.username), input_name="username" + ) + ), + ) + ) + if self.password is not None and "password" not in already_processed: + already_processed.add("password") + outfile.write( + " password=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.password), input_name="password" + ) + ), + ) + ) + if self.version is not None and "version" not in already_processed: + already_processed.add("version") + outfile.write( + " version=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.version), input_name="version" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EShipper", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.ShippingRequest is not None: + namespaceprefix_ = ( + self.ShippingRequest_nsprefix_ + ":" + if (UseCapturedNS_ and self.ShippingRequest_nsprefix_) + else "" + ) + self.ShippingRequest.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ShippingRequest", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("username", node) + if value is not None and "username" not in already_processed: + already_processed.add("username") + self.username = value + value = find_attr_value_("password", node) + if value is not None and "password" not in already_processed: + already_processed.add("password") + self.password = value + value = find_attr_value_("version", node) + if value is not None and "version" not in already_processed: + already_processed.add("version") + self.version = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "ShippingRequest": + obj_ = ShippingRequestType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingRequest = obj_ + obj_.original_tagname_ = "ShippingRequest" + + +# end class EShipper + + +class ShippingRequestType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + serviceId=None, + destinationCloseMin=None, + destinationCloseHrs=None, + originCloseTimeMin=None, + originCloseTimeHrs=None, + readyTimeMin=None, + readyTimeHrs=None, + SortandSegregateCharge=None, + homelandSecurity=None, + 
pierCharge=None, + exhibitionConventionSite=None, + militaryBaseDelivery=None, + customsIn_bondFreight=None, + limitedAccess=None, + excessLength=None, + crossBorderFee=None, + singleShipment=None, + insidePickup=None, + insuranceType=None, + scheduledShipDate=None, + insideDelivery=None, + deliveryAppointment=None, + specialEquipment=None, + holdForPickupRequired=None, + signatureRequired=None, + isSaturdayService=None, + dangerousGoodsType=None, + stackable=None, + From=None, + To=None, + Shipper=None, + COD=None, + Packages=None, + Pickup=None, + Payment=None, + Reference=None, + CustomsInvoice=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.serviceId = _cast(int, serviceId) + self.serviceId_nsprefix_ = None + self.destinationCloseMin = _cast(None, destinationCloseMin) + self.destinationCloseMin_nsprefix_ = None + self.destinationCloseHrs = _cast(None, destinationCloseHrs) + self.destinationCloseHrs_nsprefix_ = None + self.originCloseTimeMin = _cast(None, originCloseTimeMin) + self.originCloseTimeMin_nsprefix_ = None + self.originCloseTimeHrs = _cast(None, originCloseTimeHrs) + self.originCloseTimeHrs_nsprefix_ = None + self.readyTimeMin = _cast(None, readyTimeMin) + self.readyTimeMin_nsprefix_ = None + self.readyTimeHrs = _cast(None, readyTimeHrs) + self.readyTimeHrs_nsprefix_ = None + self.SortandSegregateCharge = _cast(None, SortandSegregateCharge) + self.SortandSegregateCharge_nsprefix_ = None + self.homelandSecurity = _cast(None, homelandSecurity) + self.homelandSecurity_nsprefix_ = None + self.pierCharge = _cast(None, pierCharge) + self.pierCharge_nsprefix_ = None + self.exhibitionConventionSite = _cast(None, exhibitionConventionSite) + self.exhibitionConventionSite_nsprefix_ = None + self.militaryBaseDelivery = _cast(None, militaryBaseDelivery) + self.militaryBaseDelivery_nsprefix_ = None + self.customsIn_bondFreight = _cast(None, customsIn_bondFreight) + self.customsIn_bondFreight_nsprefix_ = None + self.limitedAccess = _cast(None, limitedAccess) + self.limitedAccess_nsprefix_ = None + self.excessLength = _cast(None, excessLength) + self.excessLength_nsprefix_ = None + self.crossBorderFee = _cast(None, crossBorderFee) + self.crossBorderFee_nsprefix_ = None + self.singleShipment = _cast(None, singleShipment) + self.singleShipment_nsprefix_ = None + self.insidePickup = _cast(None, insidePickup) + self.insidePickup_nsprefix_ = None + self.insuranceType = _cast(None, insuranceType) + self.insuranceType_nsprefix_ = None + self.scheduledShipDate = _cast(None, scheduledShipDate) + self.scheduledShipDate_nsprefix_ = None + self.insideDelivery = _cast(None, insideDelivery) + self.insideDelivery_nsprefix_ = None + self.deliveryAppointment = _cast(None, deliveryAppointment) + self.deliveryAppointment_nsprefix_ = None + self.specialEquipment = _cast(None, specialEquipment) + self.specialEquipment_nsprefix_ = None + self.holdForPickupRequired = _cast(None, holdForPickupRequired) + self.holdForPickupRequired_nsprefix_ = None + self.signatureRequired = _cast(None, signatureRequired) + self.signatureRequired_nsprefix_ = None + self.isSaturdayService = _cast(None, isSaturdayService) + self.isSaturdayService_nsprefix_ = None + self.dangerousGoodsType = _cast(None, dangerousGoodsType) + self.dangerousGoodsType_nsprefix_ = None + self.stackable = _cast(None, stackable) + self.stackable_nsprefix_ = None + 
self.From = From + self.From_nsprefix_ = None + self.To = To + self.To_nsprefix_ = None + self.Shipper = Shipper + self.Shipper_nsprefix_ = None + self.COD = COD + self.COD_nsprefix_ = None + self.Packages = Packages + self.Packages_nsprefix_ = None + self.Pickup = Pickup + self.Pickup_nsprefix_ = None + self.Payment = Payment + self.Payment_nsprefix_ = None + if Reference is None: + self.Reference = [] + else: + self.Reference = Reference + self.Reference_nsprefix_ = None + self.CustomsInvoice = CustomsInvoice + self.CustomsInvoice_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShippingRequestType + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingRequestType.subclass: + return ShippingRequestType.subclass(*args_, **kwargs_) + else: + return ShippingRequestType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_From(self): + return self.From + + def set_From(self, From): + self.From = From + + def get_To(self): + return self.To + + def set_To(self, To): + self.To = To + + def get_Shipper(self): + return self.Shipper + + def set_Shipper(self, Shipper): + self.Shipper = Shipper + + def get_COD(self): + return self.COD + + def set_COD(self, COD): + self.COD = COD + + def get_Packages(self): + return self.Packages + + def set_Packages(self, Packages): + self.Packages = Packages + + def get_Pickup(self): + return self.Pickup + + def set_Pickup(self, Pickup): + self.Pickup = Pickup + + def get_Payment(self): + return self.Payment + + def set_Payment(self, Payment): + self.Payment = Payment + + def get_Reference(self): + return self.Reference + + def set_Reference(self, Reference): + self.Reference = Reference + + def add_Reference(self, value): + self.Reference.append(value) + + def insert_Reference_at(self, index, value): + self.Reference.insert(index, value) + + def replace_Reference_at(self, index, value): + self.Reference[index] = value + + def get_CustomsInvoice(self): + return self.CustomsInvoice + + def set_CustomsInvoice(self, CustomsInvoice): + self.CustomsInvoice = CustomsInvoice + + def get_serviceId(self): + return self.serviceId + + def set_serviceId(self, serviceId): + self.serviceId = serviceId + + def get_destinationCloseMin(self): + return self.destinationCloseMin + + def set_destinationCloseMin(self, destinationCloseMin): + self.destinationCloseMin = destinationCloseMin + + def get_destinationCloseHrs(self): + return self.destinationCloseHrs + + def set_destinationCloseHrs(self, destinationCloseHrs): + self.destinationCloseHrs = destinationCloseHrs + + def get_originCloseTimeMin(self): + return self.originCloseTimeMin + + def set_originCloseTimeMin(self, originCloseTimeMin): + self.originCloseTimeMin = originCloseTimeMin + + def get_originCloseTimeHrs(self): + return self.originCloseTimeHrs + + def set_originCloseTimeHrs(self, originCloseTimeHrs): + self.originCloseTimeHrs = originCloseTimeHrs + + def get_readyTimeMin(self): + return self.readyTimeMin + + def set_readyTimeMin(self, readyTimeMin): + self.readyTimeMin = readyTimeMin + + def get_readyTimeHrs(self): + return self.readyTimeHrs + + def set_readyTimeHrs(self, readyTimeHrs): + self.readyTimeHrs = readyTimeHrs + + def get_SortandSegregateCharge(self): + return self.SortandSegregateCharge + + def set_SortandSegregateCharge(self, 
SortandSegregateCharge): + self.SortandSegregateCharge = SortandSegregateCharge + + def get_homelandSecurity(self): + return self.homelandSecurity + + def set_homelandSecurity(self, homelandSecurity): + self.homelandSecurity = homelandSecurity + + def get_pierCharge(self): + return self.pierCharge + + def set_pierCharge(self, pierCharge): + self.pierCharge = pierCharge + + def get_exhibitionConventionSite(self): + return self.exhibitionConventionSite + + def set_exhibitionConventionSite(self, exhibitionConventionSite): + self.exhibitionConventionSite = exhibitionConventionSite + + def get_militaryBaseDelivery(self): + return self.militaryBaseDelivery + + def set_militaryBaseDelivery(self, militaryBaseDelivery): + self.militaryBaseDelivery = militaryBaseDelivery + + def get_customsIn_bondFreight(self): + return self.customsIn_bondFreight + + def set_customsIn_bondFreight(self, customsIn_bondFreight): + self.customsIn_bondFreight = customsIn_bondFreight + + def get_limitedAccess(self): + return self.limitedAccess + + def set_limitedAccess(self, limitedAccess): + self.limitedAccess = limitedAccess + + def get_excessLength(self): + return self.excessLength + + def set_excessLength(self, excessLength): + self.excessLength = excessLength + + def get_crossBorderFee(self): + return self.crossBorderFee + + def set_crossBorderFee(self, crossBorderFee): + self.crossBorderFee = crossBorderFee + + def get_singleShipment(self): + return self.singleShipment + + def set_singleShipment(self, singleShipment): + self.singleShipment = singleShipment + + def get_insidePickup(self): + return self.insidePickup + + def set_insidePickup(self, insidePickup): + self.insidePickup = insidePickup + + def get_insuranceType(self): + return self.insuranceType + + def set_insuranceType(self, insuranceType): + self.insuranceType = insuranceType + + def get_scheduledShipDate(self): + return self.scheduledShipDate + + def set_scheduledShipDate(self, scheduledShipDate): + self.scheduledShipDate = scheduledShipDate + + def get_insideDelivery(self): + return self.insideDelivery + + def set_insideDelivery(self, insideDelivery): + self.insideDelivery = insideDelivery + + def get_deliveryAppointment(self): + return self.deliveryAppointment + + def set_deliveryAppointment(self, deliveryAppointment): + self.deliveryAppointment = deliveryAppointment + + def get_specialEquipment(self): + return self.specialEquipment + + def set_specialEquipment(self, specialEquipment): + self.specialEquipment = specialEquipment + + def get_holdForPickupRequired(self): + return self.holdForPickupRequired + + def set_holdForPickupRequired(self, holdForPickupRequired): + self.holdForPickupRequired = holdForPickupRequired + + def get_signatureRequired(self): + return self.signatureRequired + + def set_signatureRequired(self, signatureRequired): + self.signatureRequired = signatureRequired + + def get_isSaturdayService(self): + return self.isSaturdayService + + def set_isSaturdayService(self, isSaturdayService): + self.isSaturdayService = isSaturdayService + + def get_dangerousGoodsType(self): + return self.dangerousGoodsType + + def set_dangerousGoodsType(self, dangerousGoodsType): + self.dangerousGoodsType = dangerousGoodsType + + def get_stackable(self): + return self.stackable + + def set_stackable(self, stackable): + self.stackable = stackable + + def _hasContent(self): + if ( + self.From is not None + or self.To is not None + or self.Shipper is not None + or self.COD is not None + or self.Packages is not None + or self.Pickup is not None + or 
self.Payment is not None + or self.Reference + or self.CustomsInvoice is not None + ): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShippingRequestType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ShippingRequestType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ShippingRequestType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ShippingRequestType", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ShippingRequestType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ShippingRequestType", + ): + if self.serviceId is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + outfile.write( + ' serviceId="%s"' + % self.gds_format_integer(self.serviceId, input_name="serviceId") + ) + if ( + self.destinationCloseMin is not None + and "destinationCloseMin" not in already_processed + ): + already_processed.add("destinationCloseMin") + outfile.write( + " destinationCloseMin=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.destinationCloseMin), + input_name="destinationCloseMin", + ) + ), + ) + ) + if ( + self.destinationCloseHrs is not None + and "destinationCloseHrs" not in already_processed + ): + already_processed.add("destinationCloseHrs") + outfile.write( + " destinationCloseHrs=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.destinationCloseHrs), + input_name="destinationCloseHrs", + ) + ), + ) + ) + if ( + self.originCloseTimeMin is not None + and "originCloseTimeMin" not in already_processed + ): + already_processed.add("originCloseTimeMin") + outfile.write( + " originCloseTimeMin=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.originCloseTimeMin), + input_name="originCloseTimeMin", + ) + ), + ) + ) + if ( + self.originCloseTimeHrs is not None + and "originCloseTimeHrs" not in already_processed + ): + already_processed.add("originCloseTimeHrs") + outfile.write( + " originCloseTimeHrs=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.originCloseTimeHrs), + input_name="originCloseTimeHrs", + ) + ), + ) + ) + if self.readyTimeMin is not None and "readyTimeMin" not in already_processed: + already_processed.add("readyTimeMin") + outfile.write( + " readyTimeMin=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.readyTimeMin), input_name="readyTimeMin" + ) + ), + ) + ) + if self.readyTimeHrs is not None and "readyTimeHrs" not in already_processed: + already_processed.add("readyTimeHrs") + outfile.write( + " readyTimeHrs=%s" + % ( + self.gds_encode( + self.gds_format_string( + 
quote_attrib(self.readyTimeHrs), input_name="readyTimeHrs" + ) + ), + ) + ) + if ( + self.SortandSegregateCharge is not None + and "SortandSegregateCharge" not in already_processed + ): + already_processed.add("SortandSegregateCharge") + outfile.write( + " SortandSegregateCharge=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.SortandSegregateCharge), + input_name="SortandSegregateCharge", + ) + ), + ) + ) + if ( + self.homelandSecurity is not None + and "homelandSecurity" not in already_processed + ): + already_processed.add("homelandSecurity") + outfile.write( + " homelandSecurity=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.homelandSecurity), + input_name="homelandSecurity", + ) + ), + ) + ) + if self.pierCharge is not None and "pierCharge" not in already_processed: + already_processed.add("pierCharge") + outfile.write( + " pierCharge=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pierCharge), input_name="pierCharge" + ) + ), + ) + ) + if ( + self.exhibitionConventionSite is not None + and "exhibitionConventionSite" not in already_processed + ): + already_processed.add("exhibitionConventionSite") + outfile.write( + " exhibitionConventionSite=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.exhibitionConventionSite), + input_name="exhibitionConventionSite", + ) + ), + ) + ) + if ( + self.militaryBaseDelivery is not None + and "militaryBaseDelivery" not in already_processed + ): + already_processed.add("militaryBaseDelivery") + outfile.write( + " militaryBaseDelivery=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.militaryBaseDelivery), + input_name="militaryBaseDelivery", + ) + ), + ) + ) + if ( + self.customsIn_bondFreight is not None + and "customsIn_bondFreight" not in already_processed + ): + already_processed.add("customsIn_bondFreight") + outfile.write( + " customsIn-bondFreight=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.customsIn_bondFreight), + input_name="customsIn-bondFreight", + ) + ), + ) + ) + if self.limitedAccess is not None and "limitedAccess" not in already_processed: + already_processed.add("limitedAccess") + outfile.write( + " limitedAccess=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.limitedAccess), input_name="limitedAccess" + ) + ), + ) + ) + if self.excessLength is not None and "excessLength" not in already_processed: + already_processed.add("excessLength") + outfile.write( + " excessLength=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.excessLength), input_name="excessLength" + ) + ), + ) + ) + if ( + self.crossBorderFee is not None + and "crossBorderFee" not in already_processed + ): + already_processed.add("crossBorderFee") + outfile.write( + " crossBorderFee=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.crossBorderFee), + input_name="crossBorderFee", + ) + ), + ) + ) + if ( + self.singleShipment is not None + and "singleShipment" not in already_processed + ): + already_processed.add("singleShipment") + outfile.write( + " singleShipment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.singleShipment), + input_name="singleShipment", + ) + ), + ) + ) + if self.insidePickup is not None and "insidePickup" not in already_processed: + already_processed.add("insidePickup") + outfile.write( + " insidePickup=%s" + % ( + self.gds_encode( + self.gds_format_string( + 
quote_attrib(self.insidePickup), input_name="insidePickup" + ) + ), + ) + ) + if self.insuranceType is not None and "insuranceType" not in already_processed: + already_processed.add("insuranceType") + outfile.write( + " insuranceType=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.insuranceType), input_name="insuranceType" + ) + ), + ) + ) + if ( + self.scheduledShipDate is not None + and "scheduledShipDate" not in already_processed + ): + already_processed.add("scheduledShipDate") + outfile.write( + " scheduledShipDate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.scheduledShipDate), + input_name="scheduledShipDate", + ) + ), + ) + ) + if ( + self.insideDelivery is not None + and "insideDelivery" not in already_processed + ): + already_processed.add("insideDelivery") + outfile.write( + " insideDelivery=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.insideDelivery), + input_name="insideDelivery", + ) + ), + ) + ) + if ( + self.deliveryAppointment is not None + and "deliveryAppointment" not in already_processed + ): + already_processed.add("deliveryAppointment") + outfile.write( + " deliveryAppointment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.deliveryAppointment), + input_name="deliveryAppointment", + ) + ), + ) + ) + if ( + self.specialEquipment is not None + and "specialEquipment" not in already_processed + ): + already_processed.add("specialEquipment") + outfile.write( + " specialEquipment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.specialEquipment), + input_name="specialEquipment", + ) + ), + ) + ) + if ( + self.holdForPickupRequired is not None + and "holdForPickupRequired" not in already_processed + ): + already_processed.add("holdForPickupRequired") + outfile.write( + " holdForPickupRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.holdForPickupRequired), + input_name="holdForPickupRequired", + ) + ), + ) + ) + if ( + self.signatureRequired is not None + and "signatureRequired" not in already_processed + ): + already_processed.add("signatureRequired") + outfile.write( + " signatureRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.signatureRequired), + input_name="signatureRequired", + ) + ), + ) + ) + if ( + self.isSaturdayService is not None + and "isSaturdayService" not in already_processed + ): + already_processed.add("isSaturdayService") + outfile.write( + " isSaturdayService=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.isSaturdayService), + input_name="isSaturdayService", + ) + ), + ) + ) + if ( + self.dangerousGoodsType is not None + and "dangerousGoodsType" not in already_processed + ): + already_processed.add("dangerousGoodsType") + outfile.write( + " dangerousGoodsType=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dangerousGoodsType), + input_name="dangerousGoodsType", + ) + ), + ) + ) + if self.stackable is not None and "stackable" not in already_processed: + already_processed.add("stackable") + outfile.write( + " stackable=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.stackable), input_name="stackable" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShippingRequestType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.From is not None: + 
namespaceprefix_ = ( + self.From_nsprefix_ + ":" + if (UseCapturedNS_ and self.From_nsprefix_) + else "" + ) + self.From.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="From", + pretty_print=pretty_print, + ) + if self.To is not None: + namespaceprefix_ = ( + self.To_nsprefix_ + ":" + if (UseCapturedNS_ and self.To_nsprefix_) + else "" + ) + self.To.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="To", + pretty_print=pretty_print, + ) + if self.Shipper is not None: + namespaceprefix_ = ( + self.Shipper_nsprefix_ + ":" + if (UseCapturedNS_ and self.Shipper_nsprefix_) + else "" + ) + self.Shipper.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Shipper", + pretty_print=pretty_print, + ) + if self.COD is not None: + namespaceprefix_ = ( + self.COD_nsprefix_ + ":" + if (UseCapturedNS_ and self.COD_nsprefix_) + else "" + ) + self.COD.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="COD", + pretty_print=pretty_print, + ) + if self.Packages is not None: + namespaceprefix_ = ( + self.Packages_nsprefix_ + ":" + if (UseCapturedNS_ and self.Packages_nsprefix_) + else "" + ) + self.Packages.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Packages", + pretty_print=pretty_print, + ) + if self.Pickup is not None: + namespaceprefix_ = ( + self.Pickup_nsprefix_ + ":" + if (UseCapturedNS_ and self.Pickup_nsprefix_) + else "" + ) + self.Pickup.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Pickup", + pretty_print=pretty_print, + ) + if self.Payment is not None: + namespaceprefix_ = ( + self.Payment_nsprefix_ + ":" + if (UseCapturedNS_ and self.Payment_nsprefix_) + else "" + ) + self.Payment.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Payment", + pretty_print=pretty_print, + ) + for Reference_ in self.Reference: + namespaceprefix_ = ( + self.Reference_nsprefix_ + ":" + if (UseCapturedNS_ and self.Reference_nsprefix_) + else "" + ) + Reference_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Reference", + pretty_print=pretty_print, + ) + if self.CustomsInvoice is not None: + namespaceprefix_ = ( + self.CustomsInvoice_nsprefix_ + ":" + if (UseCapturedNS_ and self.CustomsInvoice_nsprefix_) + else "" + ) + self.CustomsInvoice.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="CustomsInvoice", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("serviceId", node) + if value is not None and "serviceId" not in already_processed: + already_processed.add("serviceId") + self.serviceId = self.gds_parse_integer(value, node, "serviceId") + value = find_attr_value_("destinationCloseMin", node) + if value is not None and "destinationCloseMin" not in already_processed: + already_processed.add("destinationCloseMin") + self.destinationCloseMin = value + value = find_attr_value_("destinationCloseHrs", node) + if value is not None and "destinationCloseHrs" not in 
already_processed: + already_processed.add("destinationCloseHrs") + self.destinationCloseHrs = value + value = find_attr_value_("originCloseTimeMin", node) + if value is not None and "originCloseTimeMin" not in already_processed: + already_processed.add("originCloseTimeMin") + self.originCloseTimeMin = value + value = find_attr_value_("originCloseTimeHrs", node) + if value is not None and "originCloseTimeHrs" not in already_processed: + already_processed.add("originCloseTimeHrs") + self.originCloseTimeHrs = value + value = find_attr_value_("readyTimeMin", node) + if value is not None and "readyTimeMin" not in already_processed: + already_processed.add("readyTimeMin") + self.readyTimeMin = value + value = find_attr_value_("readyTimeHrs", node) + if value is not None and "readyTimeHrs" not in already_processed: + already_processed.add("readyTimeHrs") + self.readyTimeHrs = value + value = find_attr_value_("SortandSegregateCharge", node) + if value is not None and "SortandSegregateCharge" not in already_processed: + already_processed.add("SortandSegregateCharge") + self.SortandSegregateCharge = value + value = find_attr_value_("homelandSecurity", node) + if value is not None and "homelandSecurity" not in already_processed: + already_processed.add("homelandSecurity") + self.homelandSecurity = value + value = find_attr_value_("pierCharge", node) + if value is not None and "pierCharge" not in already_processed: + already_processed.add("pierCharge") + self.pierCharge = value + value = find_attr_value_("exhibitionConventionSite", node) + if value is not None and "exhibitionConventionSite" not in already_processed: + already_processed.add("exhibitionConventionSite") + self.exhibitionConventionSite = value + value = find_attr_value_("militaryBaseDelivery", node) + if value is not None and "militaryBaseDelivery" not in already_processed: + already_processed.add("militaryBaseDelivery") + self.militaryBaseDelivery = value + value = find_attr_value_("customsIn-bondFreight", node) + if value is not None and "customsIn-bondFreight" not in already_processed: + already_processed.add("customsIn-bondFreight") + self.customsIn_bondFreight = value + value = find_attr_value_("limitedAccess", node) + if value is not None and "limitedAccess" not in already_processed: + already_processed.add("limitedAccess") + self.limitedAccess = value + value = find_attr_value_("excessLength", node) + if value is not None and "excessLength" not in already_processed: + already_processed.add("excessLength") + self.excessLength = value + value = find_attr_value_("crossBorderFee", node) + if value is not None and "crossBorderFee" not in already_processed: + already_processed.add("crossBorderFee") + self.crossBorderFee = value + value = find_attr_value_("singleShipment", node) + if value is not None and "singleShipment" not in already_processed: + already_processed.add("singleShipment") + self.singleShipment = value + value = find_attr_value_("insidePickup", node) + if value is not None and "insidePickup" not in already_processed: + already_processed.add("insidePickup") + self.insidePickup = value + value = find_attr_value_("insuranceType", node) + if value is not None and "insuranceType" not in already_processed: + already_processed.add("insuranceType") + self.insuranceType = value + value = find_attr_value_("scheduledShipDate", node) + if value is not None and "scheduledShipDate" not in already_processed: + already_processed.add("scheduledShipDate") + self.scheduledShipDate = value + value = find_attr_value_("insideDelivery", node) + 
if value is not None and "insideDelivery" not in already_processed: + already_processed.add("insideDelivery") + self.insideDelivery = value + value = find_attr_value_("deliveryAppointment", node) + if value is not None and "deliveryAppointment" not in already_processed: + already_processed.add("deliveryAppointment") + self.deliveryAppointment = value + value = find_attr_value_("specialEquipment", node) + if value is not None and "specialEquipment" not in already_processed: + already_processed.add("specialEquipment") + self.specialEquipment = value + value = find_attr_value_("holdForPickupRequired", node) + if value is not None and "holdForPickupRequired" not in already_processed: + already_processed.add("holdForPickupRequired") + self.holdForPickupRequired = value + value = find_attr_value_("signatureRequired", node) + if value is not None and "signatureRequired" not in already_processed: + already_processed.add("signatureRequired") + self.signatureRequired = value + value = find_attr_value_("isSaturdayService", node) + if value is not None and "isSaturdayService" not in already_processed: + already_processed.add("isSaturdayService") + self.isSaturdayService = value + value = find_attr_value_("dangerousGoodsType", node) + if value is not None and "dangerousGoodsType" not in already_processed: + already_processed.add("dangerousGoodsType") + self.dangerousGoodsType = value + value = find_attr_value_("stackable", node) + if value is not None and "stackable" not in already_processed: + already_processed.add("stackable") + self.stackable = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "From": + obj_ = FromType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.From = obj_ + obj_.original_tagname_ = "From" + elif nodeName_ == "To": + obj_ = ToType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.To = obj_ + obj_.original_tagname_ = "To" + elif nodeName_ == "Shipper": + obj_ = ShipperType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Shipper = obj_ + obj_.original_tagname_ = "Shipper" + elif nodeName_ == "COD": + obj_ = CODType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.COD = obj_ + obj_.original_tagname_ = "COD" + elif nodeName_ == "Packages": + obj_ = PackagesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Packages = obj_ + obj_.original_tagname_ = "Packages" + elif nodeName_ == "Pickup": + obj_ = PickupType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Pickup = obj_ + obj_.original_tagname_ = "Pickup" + elif nodeName_ == "Payment": + obj_ = PaymentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Payment = obj_ + obj_.original_tagname_ = "Payment" + elif nodeName_ == "Reference": + obj_ = ReferenceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Reference.append(obj_) + obj_.original_tagname_ = "Reference" + elif nodeName_ == "CustomsInvoice": + obj_ = CustomsInvoiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.CustomsInvoice = obj_ + obj_.original_tagname_ = "CustomsInvoice" + + +# end class ShippingRequestType + + +class FromType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def 
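
The hunk above restores the generated ShippingRequestType binding. As a rough, illustrative sketch of how such a generateDS-style class is typically assembled and serialized (the import path, the root tag name, and every field value below are assumptions for illustration, not taken from this patch):

    import io
    from karrio.schemas.eshipper_xml.shipping_request import (
        ShippingRequestType,
        FromType,
        ToType,
    )

    # Hypothetical request; all values here are made up for illustration.
    req = ShippingRequestType.factory()       # factory() follows the generateDS pattern used by the other classes
    req.serviceId = 4500                      # assumed service id; real ids come from eShipper
    req.From = FromType(company="ACME Corp", city="Toronto", state="ON", country="CA", zip="M5V1A1")
    req.To = ToType(company="Beta Inc", city="Boston", state="MA", country="US", zip="02115")
    if req.Reference is None:                 # precaution: export() iterates over Reference as a list
        req.Reference = []

    buf = io.StringIO()
    # name_ overrides the default "ShippingRequestType" tag; the actual root tag
    # used by the provider mapping code may differ.
    req.export(buf, 0, name_="ShippingRequest")
    print(buf.getvalue())
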
__init__( + self, + id=None, + company=None, + email=None, + attention=None, + phone=None, + tailgateRequired=None, + residential=None, + confirmDelivery=None, + instructions=None, + address1=None, + address2=None, + city=None, + state=None, + country=None, + zip=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.company = _cast(None, company) + self.company_nsprefix_ = None + self.email = _cast(None, email) + self.email_nsprefix_ = None + self.attention = _cast(None, attention) + self.attention_nsprefix_ = None + self.phone = _cast(None, phone) + self.phone_nsprefix_ = None + self.tailgateRequired = _cast(None, tailgateRequired) + self.tailgateRequired_nsprefix_ = None + self.residential = _cast(None, residential) + self.residential_nsprefix_ = None + self.confirmDelivery = _cast(None, confirmDelivery) + self.confirmDelivery_nsprefix_ = None + self.instructions = _cast(None, instructions) + self.instructions_nsprefix_ = None + self.address1 = _cast(None, address1) + self.address1_nsprefix_ = None + self.address2 = _cast(None, address2) + self.address2_nsprefix_ = None + self.city = _cast(None, city) + self.city_nsprefix_ = None + self.state = _cast(None, state) + self.state_nsprefix_ = None + self.country = _cast(None, country) + self.country_nsprefix_ = None + self.zip = _cast(None, zip) + self.zip_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, FromType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FromType.subclass: + return FromType.subclass(*args_, **kwargs_) + else: + return FromType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_company(self): + return self.company + + def set_company(self, company): + self.company = company + + def get_email(self): + return self.email + + def set_email(self, email): + self.email = email + + def get_attention(self): + return self.attention + + def set_attention(self, attention): + self.attention = attention + + def get_phone(self): + return self.phone + + def set_phone(self, phone): + self.phone = phone + + def get_tailgateRequired(self): + return self.tailgateRequired + + def set_tailgateRequired(self, tailgateRequired): + self.tailgateRequired = tailgateRequired + + def get_residential(self): + return self.residential + + def set_residential(self, residential): + self.residential = residential + + def get_confirmDelivery(self): + return self.confirmDelivery + + def set_confirmDelivery(self, confirmDelivery): + self.confirmDelivery = confirmDelivery + + def get_instructions(self): + return self.instructions + + def set_instructions(self, instructions): + self.instructions = instructions + + def get_address1(self): + return self.address1 + + def set_address1(self, address1): + self.address1 = address1 + + def get_address2(self): + return self.address2 + + def set_address2(self, address2): + self.address2 = address2 + + def get_city(self): + return self.city + + def set_city(self, city): + self.city = city + + def 
get_state(self): + return self.state + + def set_state(self, state): + self.state = state + + def get_country(self): + return self.country + + def set_country(self, country): + self.country = country + + def get_zip(self): + return self.zip + + def set_zip(self, zip): + self.zip = zip + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FromType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FromType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "FromType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="FromType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FromType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="FromType" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.company is not None and "company" not in already_processed: + already_processed.add("company") + outfile.write( + " company=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.company), input_name="company" + ) + ), + ) + ) + if self.email is not None and "email" not in already_processed: + already_processed.add("email") + outfile.write( + " email=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.email), input_name="email" + ) + ), + ) + ) + if self.attention is not None and "attention" not in already_processed: + already_processed.add("attention") + outfile.write( + " attention=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.attention), input_name="attention" + ) + ), + ) + ) + if self.phone is not None and "phone" not in already_processed: + already_processed.add("phone") + outfile.write( + " phone=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phone), input_name="phone" + ) + ), + ) + ) + if ( + self.tailgateRequired is not None + and "tailgateRequired" not in already_processed + ): + already_processed.add("tailgateRequired") + outfile.write( + " tailgateRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tailgateRequired), + input_name="tailgateRequired", + ) + ), + ) + ) + if self.residential is not None and "residential" not in already_processed: + already_processed.add("residential") + outfile.write( + " residential=%s" + % ( + self.gds_encode( + self.gds_format_string( + 
quote_attrib(self.residential), input_name="residential" + ) + ), + ) + ) + if ( + self.confirmDelivery is not None + and "confirmDelivery" not in already_processed + ): + already_processed.add("confirmDelivery") + outfile.write( + " confirmDelivery=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.confirmDelivery), + input_name="confirmDelivery", + ) + ), + ) + ) + if self.instructions is not None and "instructions" not in already_processed: + already_processed.add("instructions") + outfile.write( + " instructions=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.instructions), input_name="instructions" + ) + ), + ) + ) + if self.address1 is not None and "address1" not in already_processed: + already_processed.add("address1") + outfile.write( + " address1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address1), input_name="address1" + ) + ), + ) + ) + if self.address2 is not None and "address2" not in already_processed: + already_processed.add("address2") + outfile.write( + " address2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address2), input_name="address2" + ) + ), + ) + ) + if self.city is not None and "city" not in already_processed: + already_processed.add("city") + outfile.write( + " city=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.city), input_name="city" + ) + ), + ) + ) + if self.state is not None and "state" not in already_processed: + already_processed.add("state") + outfile.write( + " state=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.state), input_name="state" + ) + ), + ) + ) + if self.country is not None and "country" not in already_processed: + already_processed.add("country") + outfile.write( + " country=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.country), input_name="country" + ) + ), + ) + ) + if self.zip is not None and "zip" not in already_processed: + already_processed.add("zip") + outfile.write( + " zip=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.zip), input_name="zip") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FromType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + value = find_attr_value_("company", node) + if value is not None and "company" not in already_processed: + already_processed.add("company") + self.company = value + value = find_attr_value_("email", node) + if value is not None and "email" not in already_processed: + already_processed.add("email") + self.email = value + value = find_attr_value_("attention", node) + if value is not None and "attention" not in already_processed: + already_processed.add("attention") + self.attention = value + value = 
find_attr_value_("phone", node) + if value is not None and "phone" not in already_processed: + already_processed.add("phone") + self.phone = value + value = find_attr_value_("tailgateRequired", node) + if value is not None and "tailgateRequired" not in already_processed: + already_processed.add("tailgateRequired") + self.tailgateRequired = value + value = find_attr_value_("residential", node) + if value is not None and "residential" not in already_processed: + already_processed.add("residential") + self.residential = value + value = find_attr_value_("confirmDelivery", node) + if value is not None and "confirmDelivery" not in already_processed: + already_processed.add("confirmDelivery") + self.confirmDelivery = value + value = find_attr_value_("instructions", node) + if value is not None and "instructions" not in already_processed: + already_processed.add("instructions") + self.instructions = value + value = find_attr_value_("address1", node) + if value is not None and "address1" not in already_processed: + already_processed.add("address1") + self.address1 = value + value = find_attr_value_("address2", node) + if value is not None and "address2" not in already_processed: + already_processed.add("address2") + self.address2 = value + value = find_attr_value_("city", node) + if value is not None and "city" not in already_processed: + already_processed.add("city") + self.city = value + value = find_attr_value_("state", node) + if value is not None and "state" not in already_processed: + already_processed.add("state") + self.state = value + value = find_attr_value_("country", node) + if value is not None and "country" not in already_processed: + already_processed.add("country") + self.country = value + value = find_attr_value_("zip", node) + if value is not None and "zip" not in already_processed: + already_processed.add("zip") + self.zip = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class FromType + + +class ToType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + id=None, + company=None, + email=None, + notifyRecipient=None, + attention=None, + phone=None, + tailgateRequired=None, + residential=None, + instructions=None, + address1=None, + address2=None, + city=None, + state=None, + country=None, + zip=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.company = _cast(None, company) + self.company_nsprefix_ = None + self.email = _cast(None, email) + self.email_nsprefix_ = None + self.notifyRecipient = _cast(None, notifyRecipient) + self.notifyRecipient_nsprefix_ = None + self.attention = _cast(None, attention) + self.attention_nsprefix_ = None + self.phone = _cast(None, phone) + self.phone_nsprefix_ = None + self.tailgateRequired = _cast(None, tailgateRequired) + self.tailgateRequired_nsprefix_ = None + self.residential = _cast(None, residential) + self.residential_nsprefix_ = None + self.instructions = _cast(None, instructions) + self.instructions_nsprefix_ = None + self.address1 = _cast(None, address1) + self.address1_nsprefix_ = None + self.address2 = _cast(None, address2) + self.address2_nsprefix_ = None + self.city = _cast(None, city) + self.city_nsprefix_ = None + self.state = 
_cast(None, state) + self.state_nsprefix_ = None + self.country = _cast(None, country) + self.country_nsprefix_ = None + self.zip = _cast(None, zip) + self.zip_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ToType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ToType.subclass: + return ToType.subclass(*args_, **kwargs_) + else: + return ToType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_company(self): + return self.company + + def set_company(self, company): + self.company = company + + def get_email(self): + return self.email + + def set_email(self, email): + self.email = email + + def get_notifyRecipient(self): + return self.notifyRecipient + + def set_notifyRecipient(self, notifyRecipient): + self.notifyRecipient = notifyRecipient + + def get_attention(self): + return self.attention + + def set_attention(self, attention): + self.attention = attention + + def get_phone(self): + return self.phone + + def set_phone(self, phone): + self.phone = phone + + def get_tailgateRequired(self): + return self.tailgateRequired + + def set_tailgateRequired(self, tailgateRequired): + self.tailgateRequired = tailgateRequired + + def get_residential(self): + return self.residential + + def set_residential(self, residential): + self.residential = residential + + def get_instructions(self): + return self.instructions + + def set_instructions(self, instructions): + self.instructions = instructions + + def get_address1(self): + return self.address1 + + def set_address1(self, address1): + self.address1 = address1 + + def get_address2(self): + return self.address2 + + def set_address2(self, address2): + self.address2 = address2 + + def get_city(self): + return self.city + + def set_city(self, city): + self.city = city + + def get_state(self): + return self.state + + def set_state(self, state): + self.state = state + + def get_country(self): + return self.country + + def set_country(self, country): + self.country = country + + def get_zip(self): + return self.zip + + def set_zip(self, zip): + self.zip = zip + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ToType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ToType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ToType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ToType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + 
self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ToType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="ToType" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.company is not None and "company" not in already_processed: + already_processed.add("company") + outfile.write( + " company=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.company), input_name="company" + ) + ), + ) + ) + if self.email is not None and "email" not in already_processed: + already_processed.add("email") + outfile.write( + " email=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.email), input_name="email" + ) + ), + ) + ) + if ( + self.notifyRecipient is not None + and "notifyRecipient" not in already_processed + ): + already_processed.add("notifyRecipient") + outfile.write( + " notifyRecipient=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.notifyRecipient), + input_name="notifyRecipient", + ) + ), + ) + ) + if self.attention is not None and "attention" not in already_processed: + already_processed.add("attention") + outfile.write( + " attention=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.attention), input_name="attention" + ) + ), + ) + ) + if self.phone is not None and "phone" not in already_processed: + already_processed.add("phone") + outfile.write( + " phone=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phone), input_name="phone" + ) + ), + ) + ) + if ( + self.tailgateRequired is not None + and "tailgateRequired" not in already_processed + ): + already_processed.add("tailgateRequired") + outfile.write( + " tailgateRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tailgateRequired), + input_name="tailgateRequired", + ) + ), + ) + ) + if self.residential is not None and "residential" not in already_processed: + already_processed.add("residential") + outfile.write( + " residential=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.residential), input_name="residential" + ) + ), + ) + ) + if self.instructions is not None and "instructions" not in already_processed: + already_processed.add("instructions") + outfile.write( + " instructions=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.instructions), input_name="instructions" + ) + ), + ) + ) + if self.address1 is not None and "address1" not in already_processed: + already_processed.add("address1") + outfile.write( + " address1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address1), input_name="address1" + ) + ), + ) + ) + if self.address2 is not None and "address2" not in already_processed: + already_processed.add("address2") + outfile.write( + " address2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address2), input_name="address2" + ) + ), + ) + ) + if self.city is not None and "city" not in already_processed: + already_processed.add("city") + outfile.write( + " city=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.city), input_name="city" + ) + ), + ) + ) 
+ if self.state is not None and "state" not in already_processed: + already_processed.add("state") + outfile.write( + " state=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.state), input_name="state" + ) + ), + ) + ) + if self.country is not None and "country" not in already_processed: + already_processed.add("country") + outfile.write( + " country=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.country), input_name="country" + ) + ), + ) + ) + if self.zip is not None and "zip" not in already_processed: + already_processed.add("zip") + outfile.write( + " zip=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.zip), input_name="zip") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ToType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + value = find_attr_value_("company", node) + if value is not None and "company" not in already_processed: + already_processed.add("company") + self.company = value + value = find_attr_value_("email", node) + if value is not None and "email" not in already_processed: + already_processed.add("email") + self.email = value + value = find_attr_value_("notifyRecipient", node) + if value is not None and "notifyRecipient" not in already_processed: + already_processed.add("notifyRecipient") + self.notifyRecipient = value + value = find_attr_value_("attention", node) + if value is not None and "attention" not in already_processed: + already_processed.add("attention") + self.attention = value + value = find_attr_value_("phone", node) + if value is not None and "phone" not in already_processed: + already_processed.add("phone") + self.phone = value + value = find_attr_value_("tailgateRequired", node) + if value is not None and "tailgateRequired" not in already_processed: + already_processed.add("tailgateRequired") + self.tailgateRequired = value + value = find_attr_value_("residential", node) + if value is not None and "residential" not in already_processed: + already_processed.add("residential") + self.residential = value + value = find_attr_value_("instructions", node) + if value is not None and "instructions" not in already_processed: + already_processed.add("instructions") + self.instructions = value + value = find_attr_value_("address1", node) + if value is not None and "address1" not in already_processed: + already_processed.add("address1") + self.address1 = value + value = find_attr_value_("address2", node) + if value is not None and "address2" not in already_processed: + already_processed.add("address2") + self.address2 = value + value = find_attr_value_("city", node) + if value is not None and "city" not in already_processed: + already_processed.add("city") + self.city = value + value = find_attr_value_("state", node) + if value is not 
None and "state" not in already_processed: + already_processed.add("state") + self.state = value + value = find_attr_value_("country", node) + if value is not None and "country" not in already_processed: + already_processed.add("country") + self.country = value + value = find_attr_value_("zip", node) + if value is not None and "zip" not in already_processed: + already_processed.add("zip") + self.zip = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ToType + + +class ShipperType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + id=None, + company=None, + email=None, + notifyRecipient=None, + attention=None, + phone=None, + tailgateRequired=None, + residential=None, + instructions=None, + address1=None, + address2=None, + city=None, + state=None, + country=None, + zip=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.id = _cast(None, id) + self.id_nsprefix_ = None + self.company = _cast(None, company) + self.company_nsprefix_ = None + self.email = _cast(None, email) + self.email_nsprefix_ = None + self.notifyRecipient = _cast(None, notifyRecipient) + self.notifyRecipient_nsprefix_ = None + self.attention = _cast(None, attention) + self.attention_nsprefix_ = None + self.phone = _cast(None, phone) + self.phone_nsprefix_ = None + self.tailgateRequired = _cast(None, tailgateRequired) + self.tailgateRequired_nsprefix_ = None + self.residential = _cast(None, residential) + self.residential_nsprefix_ = None + self.instructions = _cast(None, instructions) + self.instructions_nsprefix_ = None + self.address1 = _cast(None, address1) + self.address1_nsprefix_ = None + self.address2 = _cast(None, address2) + self.address2_nsprefix_ = None + self.city = _cast(None, city) + self.city_nsprefix_ = None + self.state = _cast(None, state) + self.state_nsprefix_ = None + self.country = _cast(None, country) + self.country_nsprefix_ = None + self.zip = _cast(None, zip) + self.zip_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ShipperType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShipperType.subclass: + return ShipperType.subclass(*args_, **kwargs_) + else: + return ShipperType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_id(self): + return self.id + + def set_id(self, id): + self.id = id + + def get_company(self): + return self.company + + def set_company(self, company): + self.company = company + + def get_email(self): + return self.email + + def set_email(self, email): + self.email = email + + def get_notifyRecipient(self): + return self.notifyRecipient + + def set_notifyRecipient(self, notifyRecipient): + self.notifyRecipient = notifyRecipient + + def get_attention(self): + return self.attention + + def set_attention(self, attention): + self.attention = attention + + def get_phone(self): + return self.phone + + def set_phone(self, phone): + self.phone = phone + + def get_tailgateRequired(self): + return self.tailgateRequired + + def 
set_tailgateRequired(self, tailgateRequired): + self.tailgateRequired = tailgateRequired + + def get_residential(self): + return self.residential + + def set_residential(self, residential): + self.residential = residential + + def get_instructions(self): + return self.instructions + + def set_instructions(self, instructions): + self.instructions = instructions + + def get_address1(self): + return self.address1 + + def set_address1(self, address1): + self.address1 = address1 + + def get_address2(self): + return self.address2 + + def set_address2(self, address2): + self.address2 = address2 + + def get_city(self): + return self.city + + def set_city(self, city): + self.city = city + + def get_state(self): + return self.state + + def set_state(self, state): + self.state = state + + def get_country(self): + return self.country + + def set_country(self, country): + self.country = country + + def get_zip(self): + return self.zip + + def set_zip(self, zip): + self.zip = zip + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShipperType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ShipperType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ShipperType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ShipperType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ShipperType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ShipperType", + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.company is not None and "company" not in already_processed: + already_processed.add("company") + outfile.write( + " company=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.company), input_name="company" + ) + ), + ) + ) + if self.email is not None and "email" not in already_processed: + already_processed.add("email") + outfile.write( + " email=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.email), input_name="email" + ) + ), + ) + ) + if ( + self.notifyRecipient is not None + and "notifyRecipient" not in already_processed + ): + already_processed.add("notifyRecipient") + outfile.write( + " notifyRecipient=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.notifyRecipient), + input_name="notifyRecipient", + ) + ), + ) + ) + if 
self.attention is not None and "attention" not in already_processed: + already_processed.add("attention") + outfile.write( + " attention=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.attention), input_name="attention" + ) + ), + ) + ) + if self.phone is not None and "phone" not in already_processed: + already_processed.add("phone") + outfile.write( + " phone=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phone), input_name="phone" + ) + ), + ) + ) + if ( + self.tailgateRequired is not None + and "tailgateRequired" not in already_processed + ): + already_processed.add("tailgateRequired") + outfile.write( + " tailgateRequired=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tailgateRequired), + input_name="tailgateRequired", + ) + ), + ) + ) + if self.residential is not None and "residential" not in already_processed: + already_processed.add("residential") + outfile.write( + " residential=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.residential), input_name="residential" + ) + ), + ) + ) + if self.instructions is not None and "instructions" not in already_processed: + already_processed.add("instructions") + outfile.write( + " instructions=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.instructions), input_name="instructions" + ) + ), + ) + ) + if self.address1 is not None and "address1" not in already_processed: + already_processed.add("address1") + outfile.write( + " address1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address1), input_name="address1" + ) + ), + ) + ) + if self.address2 is not None and "address2" not in already_processed: + already_processed.add("address2") + outfile.write( + " address2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address2), input_name="address2" + ) + ), + ) + ) + if self.city is not None and "city" not in already_processed: + already_processed.add("city") + outfile.write( + " city=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.city), input_name="city" + ) + ), + ) + ) + if self.state is not None and "state" not in already_processed: + already_processed.add("state") + outfile.write( + " state=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.state), input_name="state" + ) + ), + ) + ) + if self.country is not None and "country" not in already_processed: + already_processed.add("country") + outfile.write( + " country=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.country), input_name="country" + ) + ), + ) + ) + if self.zip is not None and "zip" not in already_processed: + already_processed.add("zip") + outfile.write( + " zip=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.zip), input_name="zip") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ShipperType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + 
def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = value + value = find_attr_value_("company", node) + if value is not None and "company" not in already_processed: + already_processed.add("company") + self.company = value + value = find_attr_value_("email", node) + if value is not None and "email" not in already_processed: + already_processed.add("email") + self.email = value + value = find_attr_value_("notifyRecipient", node) + if value is not None and "notifyRecipient" not in already_processed: + already_processed.add("notifyRecipient") + self.notifyRecipient = value + value = find_attr_value_("attention", node) + if value is not None and "attention" not in already_processed: + already_processed.add("attention") + self.attention = value + value = find_attr_value_("phone", node) + if value is not None and "phone" not in already_processed: + already_processed.add("phone") + self.phone = value + value = find_attr_value_("tailgateRequired", node) + if value is not None and "tailgateRequired" not in already_processed: + already_processed.add("tailgateRequired") + self.tailgateRequired = value + value = find_attr_value_("residential", node) + if value is not None and "residential" not in already_processed: + already_processed.add("residential") + self.residential = value + value = find_attr_value_("instructions", node) + if value is not None and "instructions" not in already_processed: + already_processed.add("instructions") + self.instructions = value + value = find_attr_value_("address1", node) + if value is not None and "address1" not in already_processed: + already_processed.add("address1") + self.address1 = value + value = find_attr_value_("address2", node) + if value is not None and "address2" not in already_processed: + already_processed.add("address2") + self.address2 = value + value = find_attr_value_("city", node) + if value is not None and "city" not in already_processed: + already_processed.add("city") + self.city = value + value = find_attr_value_("state", node) + if value is not None and "state" not in already_processed: + already_processed.add("state") + self.state = value + value = find_attr_value_("country", node) + if value is not None and "country" not in already_processed: + already_processed.add("country") + self.country = value + value = find_attr_value_("zip", node) + if value is not None and "zip" not in already_processed: + already_processed.add("zip") + self.zip = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ShipperType + + +class CODType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, paymentType=None, CODReturnAddress=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.paymentType = _cast(None, paymentType) + self.paymentType_nsprefix_ = None + self.CODReturnAddress = CODReturnAddress + self.CODReturnAddress_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, CODType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CODType.subclass: + return 
CODType.subclass(*args_, **kwargs_) + else: + return CODType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_CODReturnAddress(self): + return self.CODReturnAddress + + def set_CODReturnAddress(self, CODReturnAddress): + self.CODReturnAddress = CODReturnAddress + + def get_paymentType(self): + return self.paymentType + + def set_paymentType(self, paymentType): + self.paymentType = paymentType + + def _hasContent(self): + if self.CODReturnAddress is not None: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CODType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CODType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CODType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CODType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="CODType" + ): + if self.paymentType is not None and "paymentType" not in already_processed: + already_processed.add("paymentType") + outfile.write( + " paymentType=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.paymentType), input_name="paymentType" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.CODReturnAddress is not None: + namespaceprefix_ = ( + self.CODReturnAddress_nsprefix_ + ":" + if (UseCapturedNS_ and self.CODReturnAddress_nsprefix_) + else "" + ) + self.CODReturnAddress.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="CODReturnAddress", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("paymentType", node) + if value is not None and "paymentType" not in already_processed: + already_processed.add("paymentType") + self.paymentType = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "CODReturnAddress": + obj_ = 
CODReturnAddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.CODReturnAddress = obj_ + obj_.original_tagname_ = "CODReturnAddress" + + +# end class CODType + + +class CODReturnAddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + codCompany=None, + codName=None, + codAddress1=None, + codCity=None, + codStateCode=None, + codZip=None, + codCountry=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.codCompany = _cast(None, codCompany) + self.codCompany_nsprefix_ = None + self.codName = _cast(None, codName) + self.codName_nsprefix_ = None + self.codAddress1 = _cast(None, codAddress1) + self.codAddress1_nsprefix_ = None + self.codCity = _cast(None, codCity) + self.codCity_nsprefix_ = None + self.codStateCode = _cast(None, codStateCode) + self.codStateCode_nsprefix_ = None + self.codZip = _cast(None, codZip) + self.codZip_nsprefix_ = None + self.codCountry = _cast(None, codCountry) + self.codCountry_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CODReturnAddressType + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CODReturnAddressType.subclass: + return CODReturnAddressType.subclass(*args_, **kwargs_) + else: + return CODReturnAddressType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_codCompany(self): + return self.codCompany + + def set_codCompany(self, codCompany): + self.codCompany = codCompany + + def get_codName(self): + return self.codName + + def set_codName(self, codName): + self.codName = codName + + def get_codAddress1(self): + return self.codAddress1 + + def set_codAddress1(self, codAddress1): + self.codAddress1 = codAddress1 + + def get_codCity(self): + return self.codCity + + def set_codCity(self, codCity): + self.codCity = codCity + + def get_codStateCode(self): + return self.codStateCode + + def set_codStateCode(self, codStateCode): + self.codStateCode = codStateCode + + def get_codZip(self): + return self.codZip + + def set_codZip(self, codZip): + self.codZip = codZip + + def get_codCountry(self): + return self.codCountry + + def set_codCountry(self, codCountry): + self.codCountry = codCountry + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODReturnAddressType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CODReturnAddressType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CODReturnAddressType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + 
namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="CODReturnAddressType", + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CODReturnAddressType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CODReturnAddressType", + ): + if self.codCompany is not None and "codCompany" not in already_processed: + already_processed.add("codCompany") + outfile.write( + " codCompany=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codCompany), input_name="codCompany" + ) + ), + ) + ) + if self.codName is not None and "codName" not in already_processed: + already_processed.add("codName") + outfile.write( + " codName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codName), input_name="codName" + ) + ), + ) + ) + if self.codAddress1 is not None and "codAddress1" not in already_processed: + already_processed.add("codAddress1") + outfile.write( + " codAddress1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codAddress1), input_name="codAddress1" + ) + ), + ) + ) + if self.codCity is not None and "codCity" not in already_processed: + already_processed.add("codCity") + outfile.write( + " codCity=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codCity), input_name="codCity" + ) + ), + ) + ) + if self.codStateCode is not None and "codStateCode" not in already_processed: + already_processed.add("codStateCode") + outfile.write( + " codStateCode=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codStateCode), input_name="codStateCode" + ) + ), + ) + ) + if self.codZip is not None and "codZip" not in already_processed: + already_processed.add("codZip") + outfile.write( + " codZip=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codZip), input_name="codZip" + ) + ), + ) + ) + if self.codCountry is not None and "codCountry" not in already_processed: + already_processed.add("codCountry") + outfile.write( + " codCountry=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.codCountry), input_name="codCountry" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CODReturnAddressType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("codCompany", node) + if value is not None and "codCompany" not in already_processed: + already_processed.add("codCompany") + self.codCompany = value + value = find_attr_value_("codName", node) + if 
value is not None and "codName" not in already_processed: + already_processed.add("codName") + self.codName = value + value = find_attr_value_("codAddress1", node) + if value is not None and "codAddress1" not in already_processed: + already_processed.add("codAddress1") + self.codAddress1 = value + value = find_attr_value_("codCity", node) + if value is not None and "codCity" not in already_processed: + already_processed.add("codCity") + self.codCity = value + value = find_attr_value_("codStateCode", node) + if value is not None and "codStateCode" not in already_processed: + already_processed.add("codStateCode") + self.codStateCode = value + value = find_attr_value_("codZip", node) + if value is not None and "codZip" not in already_processed: + already_processed.add("codZip") + self.codZip = value + value = find_attr_value_("codCountry", node) + if value is not None and "codCountry" not in already_processed: + already_processed.add("codCountry") + self.codCountry = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class CODReturnAddressType + + +class PackagesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, type_=None, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.type_ = _cast(None, type_) + self.type__nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PackagesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackagesType.subclass: + return PackagesType.subclass(*args_, **kwargs_) + else: + return PackagesType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_Package(self): + return self.Package + + def set_Package(self, Package): + self.Package = Package + + def add_Package(self, value): + self.Package.append(value) + + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + + def replace_Package_at(self, index, value): + self.Package[index] = value + + def get_type(self): + return self.type_ + + def set_type(self, type_): + self.type_ = type_ + + def _hasContent(self): + if self.Package: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackagesType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PackagesType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PackagesType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PackagesType" + ) + if self._hasContent(): + 
outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PackagesType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PackagesType", + ): + if self.type_ is not None and "type_" not in already_processed: + already_processed.add("type_") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type_), input_name="type" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackagesType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + for Package_ in self.Package: + namespaceprefix_ = ( + self.Package_nsprefix_ + ":" + if (UseCapturedNS_ and self.Package_nsprefix_) + else "" + ) + Package_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Package", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") + self.type_ = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Package": + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = "Package" + + +# end class PackagesType + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + length=None, + width=None, + height=None, + weight=None, + weightOz=None, + type_=None, + freightClass=None, + insuranceAmount=None, + codAmount=None, + description=None, + nmfcCode=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.length = _cast(int, length) + self.length_nsprefix_ = None + self.width = _cast(int, width) + self.width_nsprefix_ = None + self.height = _cast(int, height) + self.height_nsprefix_ = None + self.weight = _cast(int, weight) + self.weight_nsprefix_ = None + self.weightOz = _cast(int, weightOz) + self.weightOz_nsprefix_ = None + self.type_ = _cast(None, type_) + self.type__nsprefix_ = None + self.freightClass = _cast(None, freightClass) + self.freightClass_nsprefix_ = None + self.insuranceAmount = _cast(float, insuranceAmount) + self.insuranceAmount_nsprefix_ = None + self.codAmount = _cast(float, codAmount) + self.codAmount_nsprefix_ = None + self.description = _cast(None, description) + self.description_nsprefix_ = None + self.nmfcCode = _cast(int, nmfcCode) + self.nmfcCode_nsprefix_ = 
None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_length(self): + return self.length + + def set_length(self, length): + self.length = length + + def get_width(self): + return self.width + + def set_width(self, width): + self.width = width + + def get_height(self): + return self.height + + def set_height(self, height): + self.height = height + + def get_weight(self): + return self.weight + + def set_weight(self, weight): + self.weight = weight + + def get_weightOz(self): + return self.weightOz + + def set_weightOz(self, weightOz): + self.weightOz = weightOz + + def get_type(self): + return self.type_ + + def set_type(self, type_): + self.type_ = type_ + + def get_freightClass(self): + return self.freightClass + + def set_freightClass(self, freightClass): + self.freightClass = freightClass + + def get_insuranceAmount(self): + return self.insuranceAmount + + def set_insuranceAmount(self, insuranceAmount): + self.insuranceAmount = insuranceAmount + + def get_codAmount(self): + return self.codAmount + + def set_codAmount(self, codAmount): + self.codAmount = codAmount + + def get_description(self): + return self.description + + def set_description(self, description): + self.description = description + + def get_nmfcCode(self): + return self.nmfcCode + + def set_nmfcCode(self, nmfcCode): + self.nmfcCode = nmfcCode + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackageType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PackageType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PackageType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PackageType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PackageType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PackageType", + ): + if self.length is not None and "length" not in already_processed: + already_processed.add("length") + outfile.write( + ' length="%s"' + % self.gds_format_integer(self.length, input_name="length") + ) + if 
self.width is not None and "width" not in already_processed: + already_processed.add("width") + outfile.write( + ' width="%s"' % self.gds_format_integer(self.width, input_name="width") + ) + if self.height is not None and "height" not in already_processed: + already_processed.add("height") + outfile.write( + ' height="%s"' + % self.gds_format_integer(self.height, input_name="height") + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' + % self.gds_format_integer(self.weight, input_name="weight") + ) + if self.weightOz is not None and "weightOz" not in already_processed: + already_processed.add("weightOz") + outfile.write( + ' weightOz="%s"' + % self.gds_format_integer(self.weightOz, input_name="weightOz") + ) + if self.type_ is not None and "type_" not in already_processed: + already_processed.add("type_") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type_), input_name="type" + ) + ), + ) + ) + if self.freightClass is not None and "freightClass" not in already_processed: + already_processed.add("freightClass") + outfile.write( + " freightClass=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.freightClass), input_name="freightClass" + ) + ), + ) + ) + if ( + self.insuranceAmount is not None + and "insuranceAmount" not in already_processed + ): + already_processed.add("insuranceAmount") + outfile.write( + ' insuranceAmount="%s"' + % self.gds_format_float( + self.insuranceAmount, input_name="insuranceAmount" + ) + ) + if self.codAmount is not None and "codAmount" not in already_processed: + already_processed.add("codAmount") + outfile.write( + ' codAmount="%s"' + % self.gds_format_float(self.codAmount, input_name="codAmount") + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + if self.nmfcCode is not None and "nmfcCode" not in already_processed: + already_processed.add("nmfcCode") + outfile.write( + ' nmfcCode="%s"' + % self.gds_format_integer(self.nmfcCode, input_name="nmfcCode") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PackageType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("length", node) + if value is not None and "length" not in already_processed: + already_processed.add("length") + self.length = self.gds_parse_integer(value, node, "length") + value = find_attr_value_("width", node) + if value is not None and "width" not in already_processed: + already_processed.add("width") + self.width = self.gds_parse_integer(value, node, "width") + value = find_attr_value_("height", node) + if value is not None and "height" not in already_processed: + 
already_processed.add("height") + self.height = self.gds_parse_integer(value, node, "height") + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") + self.weight = self.gds_parse_integer(value, node, "weight") + value = find_attr_value_("weightOz", node) + if value is not None and "weightOz" not in already_processed: + already_processed.add("weightOz") + self.weightOz = self.gds_parse_integer(value, node, "weightOz") + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") + self.type_ = value + value = find_attr_value_("freightClass", node) + if value is not None and "freightClass" not in already_processed: + already_processed.add("freightClass") + self.freightClass = value + value = find_attr_value_("insuranceAmount", node) + if value is not None and "insuranceAmount" not in already_processed: + already_processed.add("insuranceAmount") + value = self.gds_parse_float(value, node, "insuranceAmount") + self.insuranceAmount = value + value = find_attr_value_("codAmount", node) + if value is not None and "codAmount" not in already_processed: + already_processed.add("codAmount") + value = self.gds_parse_float(value, node, "codAmount") + self.codAmount = value + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") + self.description = value + value = find_attr_value_("nmfcCode", node) + if value is not None and "nmfcCode" not in already_processed: + already_processed.add("nmfcCode") + self.nmfcCode = self.gds_parse_integer(value, node, "nmfcCode") + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class PackageType + + +class PickupType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + contactName=None, + phoneNumber=None, + pickupDate=None, + pickupTime=None, + closingTime=None, + location=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.contactName = _cast(None, contactName) + self.contactName_nsprefix_ = None + self.phoneNumber = _cast(None, phoneNumber) + self.phoneNumber_nsprefix_ = None + if isinstance(pickupDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(pickupDate, "%Y-%m-%d").date() + else: + initvalue_ = pickupDate + self.pickupDate = initvalue_ + self.pickupTime = _cast(None, pickupTime) + self.pickupTime_nsprefix_ = None + self.closingTime = _cast(None, closingTime) + self.closingTime_nsprefix_ = None + self.location = _cast(None, location) + self.location_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PickupType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PickupType.subclass: + return PickupType.subclass(*args_, **kwargs_) + else: + return PickupType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_contactName(self): + return self.contactName + + def 
set_contactName(self, contactName): + self.contactName = contactName + + def get_phoneNumber(self): + return self.phoneNumber + + def set_phoneNumber(self, phoneNumber): + self.phoneNumber = phoneNumber + + def get_pickupDate(self): + return self.pickupDate + + def set_pickupDate(self, pickupDate): + self.pickupDate = pickupDate + + def get_pickupTime(self): + return self.pickupTime + + def set_pickupTime(self, pickupTime): + self.pickupTime = pickupTime + + def get_closingTime(self): + return self.closingTime + + def set_closingTime(self, closingTime): + self.closingTime = closingTime + + def get_location(self): + return self.location + + def set_location(self, location): + self.location = location + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PickupType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PickupType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PickupType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PickupType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PickupType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="PickupType" + ): + if self.contactName is not None and "contactName" not in already_processed: + already_processed.add("contactName") + outfile.write( + " contactName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.contactName), input_name="contactName" + ) + ), + ) + ) + if self.phoneNumber is not None and "phoneNumber" not in already_processed: + already_processed.add("phoneNumber") + outfile.write( + " phoneNumber=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phoneNumber), input_name="phoneNumber" + ) + ), + ) + ) + if self.pickupDate is not None and "pickupDate" not in already_processed: + already_processed.add("pickupDate") + outfile.write( + ' pickupDate="%s"' + % self.gds_format_date(self.pickupDate, input_name="pickupDate") + ) + if self.pickupTime is not None and "pickupTime" not in already_processed: + already_processed.add("pickupTime") + outfile.write( + " pickupTime=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pickupTime), input_name="pickupTime" + ) + ), + ) + ) + if self.closingTime is not None and "closingTime" not in already_processed: + already_processed.add("closingTime") + outfile.write( + " closingTime=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.closingTime), input_name="closingTime" + ) + ), + ) + ) + if 
self.location is not None and "location" not in already_processed: + already_processed.add("location") + outfile.write( + " location=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.location), input_name="location" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PickupType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("contactName", node) + if value is not None and "contactName" not in already_processed: + already_processed.add("contactName") + self.contactName = value + value = find_attr_value_("phoneNumber", node) + if value is not None and "phoneNumber" not in already_processed: + already_processed.add("phoneNumber") + self.phoneNumber = value + value = find_attr_value_("pickupDate", node) + if value is not None and "pickupDate" not in already_processed: + already_processed.add("pickupDate") + try: + self.pickupDate = self.gds_parse_date(value) + except ValueError as exp: + raise ValueError("Bad date attribute (pickupDate): %s" % exp) + value = find_attr_value_("pickupTime", node) + if value is not None and "pickupTime" not in already_processed: + already_processed.add("pickupTime") + self.pickupTime = value + value = find_attr_value_("closingTime", node) + if value is not None and "closingTime" not in already_processed: + already_processed.add("closingTime") + self.closingTime = value + value = find_attr_value_("location", node) + if value is not None and "location" not in already_processed: + already_processed.add("location") + self.location = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class PickupType + + +class PaymentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__(self, type_=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.type_ = _cast(None, type_) + self.type__nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, PaymentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PaymentType.subclass: + return PaymentType.subclass(*args_, **kwargs_) + else: + return PaymentType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_type(self): + return self.type_ + + def set_type(self, type_): + self.type_ = type_ + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, 
float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PaymentType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PaymentType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PaymentType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PaymentType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PaymentType", + pretty_print=pretty_print, + ) + outfile.write("</%s%s>%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PaymentType", + ): + if self.type_ is not None and "type_" not in already_processed: + already_processed.add("type_") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type_), input_name="type" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PaymentType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") + self.type_ = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class PaymentType + + +class ReferenceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, name=None, code=None, valueOf_=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.name = _cast(None, name) + self.name_nsprefix_ = None + self.code = _cast(None, code) + self.code_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ReferenceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ReferenceType.subclass: + return ReferenceType.subclass(*args_, **kwargs_) + else: + return ReferenceType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + 
return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_name(self): + return self.name + + def set_name(self, name): + self.name = name + + def get_code(self): + return self.code + + def set_code(self, code): + self.code = code + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReferenceType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ReferenceType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ReferenceType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ReferenceType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ReferenceType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ReferenceType", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.code is not None and "code" not in already_processed: + already_processed.add("code") + outfile.write( + " code=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.code), input_name="code" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReferenceType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") + self.name = value + value = find_attr_value_("code", node) + if value is not None and "code" not in already_processed: + already_processed.add("code") + self.code = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ReferenceType + + +class CustomsInvoiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + 
superclass = None + + def __init__( + self, + Currency=None, + brokerName=None, + contactCompany=None, + shipperTaxID=None, + receiverTaxID=None, + contactName=None, + contactPhone=None, + BillTo=None, + Contact=None, + Item=None, + DutiesTaxes=None, + InBondManifest=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.Currency = _cast(None, Currency) + self.Currency_nsprefix_ = None + self.brokerName = _cast(None, brokerName) + self.brokerName_nsprefix_ = None + self.contactCompany = _cast(None, contactCompany) + self.contactCompany_nsprefix_ = None + self.shipperTaxID = _cast(None, shipperTaxID) + self.shipperTaxID_nsprefix_ = None + self.receiverTaxID = _cast(None, receiverTaxID) + self.receiverTaxID_nsprefix_ = None + self.contactName = _cast(None, contactName) + self.contactName_nsprefix_ = None + self.contactPhone = _cast(None, contactPhone) + self.contactPhone_nsprefix_ = None + self.BillTo = BillTo + self.BillTo_nsprefix_ = None + self.Contact = Contact + self.Contact_nsprefix_ = None + if Item is None: + self.Item = [] + else: + self.Item = Item + self.Item_nsprefix_ = None + self.DutiesTaxes = DutiesTaxes + self.DutiesTaxes_nsprefix_ = None + self.InBondManifest = InBondManifest + self.InBondManifest_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CustomsInvoiceType + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CustomsInvoiceType.subclass: + return CustomsInvoiceType.subclass(*args_, **kwargs_) + else: + return CustomsInvoiceType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_BillTo(self): + return self.BillTo + + def set_BillTo(self, BillTo): + self.BillTo = BillTo + + def get_Contact(self): + return self.Contact + + def set_Contact(self, Contact): + self.Contact = Contact + + def get_Item(self): + return self.Item + + def set_Item(self, Item): + self.Item = Item + + def add_Item(self, value): + self.Item.append(value) + + def insert_Item_at(self, index, value): + self.Item.insert(index, value) + + def replace_Item_at(self, index, value): + self.Item[index] = value + + def get_DutiesTaxes(self): + return self.DutiesTaxes + + def set_DutiesTaxes(self, DutiesTaxes): + self.DutiesTaxes = DutiesTaxes + + def get_InBondManifest(self): + return self.InBondManifest + + def set_InBondManifest(self, InBondManifest): + self.InBondManifest = InBondManifest + + def get_Currency(self): + return self.Currency + + def set_Currency(self, Currency): + self.Currency = Currency + + def get_brokerName(self): + return self.brokerName + + def set_brokerName(self, brokerName): + self.brokerName = brokerName + + def get_contactCompany(self): + return self.contactCompany + + def set_contactCompany(self, contactCompany): + self.contactCompany = contactCompany + + def get_shipperTaxID(self): + return self.shipperTaxID + + def set_shipperTaxID(self, shipperTaxID): + self.shipperTaxID = shipperTaxID + + def get_receiverTaxID(self): + return self.receiverTaxID + + def set_receiverTaxID(self, receiverTaxID): + self.receiverTaxID = receiverTaxID + + def get_contactName(self): + return self.contactName + + def set_contactName(self, contactName): + 
self.contactName = contactName + + def get_contactPhone(self): + return self.contactPhone + + def set_contactPhone(self, contactPhone): + self.contactPhone = contactPhone + + def _hasContent(self): + if ( + self.BillTo is not None + or self.Contact is not None + or self.Item + or self.DutiesTaxes is not None + or self.InBondManifest is not None + ): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CustomsInvoiceType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CustomsInvoiceType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CustomsInvoiceType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="CustomsInvoiceType", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CustomsInvoiceType", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CustomsInvoiceType", + ): + if self.Currency is not None and "Currency" not in already_processed: + already_processed.add("Currency") + outfile.write( + " Currency=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.Currency), input_name="Currency" + ) + ), + ) + ) + if self.brokerName is not None and "brokerName" not in already_processed: + already_processed.add("brokerName") + outfile.write( + " brokerName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.brokerName), input_name="brokerName" + ) + ), + ) + ) + if ( + self.contactCompany is not None + and "contactCompany" not in already_processed + ): + already_processed.add("contactCompany") + outfile.write( + " contactCompany=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.contactCompany), + input_name="contactCompany", + ) + ), + ) + ) + if self.shipperTaxID is not None and "shipperTaxID" not in already_processed: + already_processed.add("shipperTaxID") + outfile.write( + " shipperTaxID=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.shipperTaxID), input_name="shipperTaxID" + ) + ), + ) + ) + if self.receiverTaxID is not None and "receiverTaxID" not in already_processed: + already_processed.add("receiverTaxID") + outfile.write( + " receiverTaxID=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.receiverTaxID), input_name="receiverTaxID" + ) + ), + ) + ) + if self.contactName is not None and "contactName" not in already_processed: + already_processed.add("contactName") + outfile.write( + " contactName=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.contactName), input_name="contactName" + ) + ), + ) + ) + if self.contactPhone is not None and "contactPhone" not in already_processed: + already_processed.add("contactPhone") + 
outfile.write( + " contactPhone=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.contactPhone), input_name="contactPhone" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CustomsInvoiceType", + fromsubclass_=False, + pretty_print=True, + ): + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.BillTo is not None: + namespaceprefix_ = ( + self.BillTo_nsprefix_ + ":" + if (UseCapturedNS_ and self.BillTo_nsprefix_) + else "" + ) + self.BillTo.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="BillTo", + pretty_print=pretty_print, + ) + if self.Contact is not None: + namespaceprefix_ = ( + self.Contact_nsprefix_ + ":" + if (UseCapturedNS_ and self.Contact_nsprefix_) + else "" + ) + self.Contact.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Contact", + pretty_print=pretty_print, + ) + for Item_ in self.Item: + namespaceprefix_ = ( + self.Item_nsprefix_ + ":" + if (UseCapturedNS_ and self.Item_nsprefix_) + else "" + ) + Item_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Item", + pretty_print=pretty_print, + ) + if self.DutiesTaxes is not None: + namespaceprefix_ = ( + self.DutiesTaxes_nsprefix_ + ":" + if (UseCapturedNS_ and self.DutiesTaxes_nsprefix_) + else "" + ) + self.DutiesTaxes.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="DutiesTaxes", + pretty_print=pretty_print, + ) + if self.InBondManifest is not None: + namespaceprefix_ = ( + self.InBondManifest_nsprefix_ + ":" + if (UseCapturedNS_ and self.InBondManifest_nsprefix_) + else "" + ) + self.InBondManifest.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="InBondManifest", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("Currency", node) + if value is not None and "Currency" not in already_processed: + already_processed.add("Currency") + self.Currency = value + value = find_attr_value_("brokerName", node) + if value is not None and "brokerName" not in already_processed: + already_processed.add("brokerName") + self.brokerName = value + value = find_attr_value_("contactCompany", node) + if value is not None and "contactCompany" not in already_processed: + already_processed.add("contactCompany") + self.contactCompany = value + value = find_attr_value_("shipperTaxID", node) + if value is not None and "shipperTaxID" not in already_processed: + already_processed.add("shipperTaxID") + self.shipperTaxID = value + value = find_attr_value_("receiverTaxID", node) + if value is not None and "receiverTaxID" not in already_processed: + already_processed.add("receiverTaxID") + self.receiverTaxID = value + value = find_attr_value_("contactName", node) + if value is not None and "contactName" not in already_processed: + already_processed.add("contactName") + self.contactName = value + value = find_attr_value_("contactPhone", node) + if value is not None and "contactPhone" not in already_processed: + 
already_processed.add("contactPhone") + self.contactPhone = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "BillTo": + obj_ = BillToType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.BillTo = obj_ + obj_.original_tagname_ = "BillTo" + elif nodeName_ == "Contact": + obj_ = ContactType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Contact = obj_ + obj_.original_tagname_ = "Contact" + elif nodeName_ == "Item": + obj_ = ItemType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Item.append(obj_) + obj_.original_tagname_ = "Item" + elif nodeName_ == "DutiesTaxes": + obj_ = DutiesTaxesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.DutiesTaxes = obj_ + obj_.original_tagname_ = "DutiesTaxes" + elif nodeName_ == "InBondManifest": + obj_ = InBondManifestType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.InBondManifest = obj_ + obj_.original_tagname_ = "InBondManifest" + + +# end class CustomsInvoiceType + + +class BillToType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + company=None, + name=None, + address1=None, + city=None, + state=None, + zip=None, + country=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.company = _cast(None, company) + self.company_nsprefix_ = None + self.name = _cast(None, name) + self.name_nsprefix_ = None + self.address1 = _cast(None, address1) + self.address1_nsprefix_ = None + self.city = _cast(None, city) + self.city_nsprefix_ = None + self.state = _cast(None, state) + self.state_nsprefix_ = None + self.zip = _cast(None, zip) + self.zip_nsprefix_ = None + self.country = _cast(None, country) + self.country_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, BillToType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if BillToType.subclass: + return BillToType.subclass(*args_, **kwargs_) + else: + return BillToType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_company(self): + return self.company + + def set_company(self, company): + self.company = company + + def get_name(self): + return self.name + + def set_name(self, name): + self.name = name + + def get_address1(self): + return self.address1 + + def set_address1(self, address1): + self.address1 = address1 + + def get_city(self): + return self.city + + def set_city(self, city): + self.city = city + + def get_state(self): + return self.state + + def set_state(self, state): + self.state = state + + def get_zip(self): + return self.zip + + def set_zip(self, zip): + self.zip = zip + + def get_country(self): + return self.country + + def set_country(self, country): + self.country = country + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, 
float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BillToType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BillToType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BillToType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BillToType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BillToType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="BillToType" + ): + if self.company is not None and "company" not in already_processed: + already_processed.add("company") + outfile.write( + " company=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.company), input_name="company" + ) + ), + ) + ) + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.address1 is not None and "address1" not in already_processed: + already_processed.add("address1") + outfile.write( + " address1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.address1), input_name="address1" + ) + ), + ) + ) + if self.city is not None and "city" not in already_processed: + already_processed.add("city") + outfile.write( + " city=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.city), input_name="city" + ) + ), + ) + ) + if self.state is not None and "state" not in already_processed: + already_processed.add("state") + outfile.write( + " state=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.state), input_name="state" + ) + ), + ) + ) + if self.zip is not None and "zip" not in already_processed: + already_processed.add("zip") + outfile.write( + " zip=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.zip), input_name="zip") + ), + ) + ) + if self.country is not None and "country" not in already_processed: + already_processed.add("country") + outfile.write( + " country=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.country), input_name="country" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BillToType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + 
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("company", node) + if value is not None and "company" not in already_processed: + already_processed.add("company") + self.company = value + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") + self.name = value + value = find_attr_value_("address1", node) + if value is not None and "address1" not in already_processed: + already_processed.add("address1") + self.address1 = value + value = find_attr_value_("city", node) + if value is not None and "city" not in already_processed: + already_processed.add("city") + self.city = value + value = find_attr_value_("state", node) + if value is not None and "state" not in already_processed: + already_processed.add("state") + self.state = value + value = find_attr_value_("zip", node) + if value is not None and "zip" not in already_processed: + already_processed.add("zip") + self.zip = value + value = find_attr_value_("country", node) + if value is not None and "country" not in already_processed: + already_processed.add("country") + self.country = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class BillToType + + +class ContactType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, name=None, phone=None, valueOf_=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.name = _cast(None, name) + self.name_nsprefix_ = None + self.phone = _cast(None, phone) + self.phone_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ContactType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContactType.subclass: + return ContactType.subclass(*args_, **kwargs_) + else: + return ContactType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_name(self): + return self.name + + def set_name(self, name): + self.name = name + + def get_phone(self): + return self.phone + + def set_phone(self, phone): + self.phone = phone + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContactType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContactType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ContactType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + 
namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ContactType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContactType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContactType", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.phone is not None and "phone" not in already_processed: + already_processed.add("phone") + outfile.write( + " phone=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phone), input_name="phone" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContactType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") + self.name = value + value = find_attr_value_("phone", node) + if value is not None and "phone" not in already_processed: + already_processed.add("phone") + self.phone = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ContactType + + +class ItemType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + code=None, + description=None, + originCountry=None, + quantity=None, + unitPrice=None, + skuCode=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.code = _cast(None, code) + self.code_nsprefix_ = None + self.description = _cast(None, description) + self.description_nsprefix_ = None + self.originCountry = _cast(None, originCountry) + self.originCountry_nsprefix_ = None + self.quantity = _cast(int, quantity) + self.quantity_nsprefix_ = None + self.unitPrice = _cast(float, unitPrice) + self.unitPrice_nsprefix_ = None + self.skuCode = _cast(None, skuCode) + self.skuCode_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, ItemType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemType.subclass: + return 
ItemType.subclass(*args_, **kwargs_) + else: + return ItemType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_code(self): + return self.code + + def set_code(self, code): + self.code = code + + def get_description(self): + return self.description + + def set_description(self, description): + self.description = description + + def get_originCountry(self): + return self.originCountry + + def set_originCountry(self, originCountry): + self.originCountry = originCountry + + def get_quantity(self): + return self.quantity + + def set_quantity(self, quantity): + self.quantity = quantity + + def get_unitPrice(self): + return self.unitPrice + + def set_unitPrice(self, unitPrice): + self.unitPrice = unitPrice + + def get_skuCode(self): + return self.skuCode + + def set_skuCode(self, skuCode): + self.skuCode = skuCode + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ItemType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ItemType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ItemType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ItemType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ItemType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="ItemType" + ): + if self.code is not None and "code" not in already_processed: + already_processed.add("code") + outfile.write( + " code=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.code), input_name="code" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + if self.originCountry is not None and "originCountry" not in already_processed: + already_processed.add("originCountry") + outfile.write( + " originCountry=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.originCountry), input_name="originCountry" + ) + ), + ) + ) + if self.quantity is not None and "quantity" not in already_processed: + already_processed.add("quantity") + outfile.write( + ' quantity="%s"' + % self.gds_format_integer(self.quantity, input_name="quantity") + ) + if self.unitPrice is not None and "unitPrice" not in already_processed: + 
already_processed.add("unitPrice") + outfile.write( + ' unitPrice="%s"' + % self.gds_format_float(self.unitPrice, input_name="unitPrice") + ) + if self.skuCode is not None and "skuCode" not in already_processed: + already_processed.add("skuCode") + outfile.write( + " skuCode=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.skuCode), input_name="skuCode" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ItemType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("code", node) + if value is not None and "code" not in already_processed: + already_processed.add("code") + self.code = value + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") + self.description = value + value = find_attr_value_("originCountry", node) + if value is not None and "originCountry" not in already_processed: + already_processed.add("originCountry") + self.originCountry = value + value = find_attr_value_("quantity", node) + if value is not None and "quantity" not in already_processed: + already_processed.add("quantity") + self.quantity = self.gds_parse_integer(value, node, "quantity") + value = find_attr_value_("unitPrice", node) + if value is not None and "unitPrice" not in already_processed: + already_processed.add("unitPrice") + value = self.gds_parse_float(value, node, "unitPrice") + self.unitPrice = value + value = find_attr_value_("skuCode", node) + if value is not None and "skuCode" not in already_processed: + already_processed.add("skuCode") + self.skuCode = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class ItemType + + +class DutiesTaxesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + consigneeAccount=None, + sedNumber=None, + dutiable=None, + billTo=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.consigneeAccount = _cast(None, consigneeAccount) + self.consigneeAccount_nsprefix_ = None + self.sedNumber = _cast(None, sedNumber) + self.sedNumber_nsprefix_ = None + self.dutiable = _cast(None, dutiable) + self.dutiable_nsprefix_ = None + self.billTo = _cast(None, billTo) + self.billTo_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, DutiesTaxesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DutiesTaxesType.subclass: + return DutiesTaxesType.subclass(*args_, **kwargs_) + else: + return DutiesTaxesType(*args_, **kwargs_) + + factory = 
staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_consigneeAccount(self): + return self.consigneeAccount + + def set_consigneeAccount(self, consigneeAccount): + self.consigneeAccount = consigneeAccount + + def get_sedNumber(self): + return self.sedNumber + + def set_sedNumber(self, sedNumber): + self.sedNumber = sedNumber + + def get_dutiable(self): + return self.dutiable + + def set_dutiable(self, dutiable): + self.dutiable = dutiable + + def get_billTo(self): + return self.billTo + + def set_billTo(self, billTo): + self.billTo = billTo + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DutiesTaxesType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("DutiesTaxesType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "DutiesTaxesType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DutiesTaxesType" + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DutiesTaxesType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DutiesTaxesType", + ): + if ( + self.consigneeAccount is not None + and "consigneeAccount" not in already_processed + ): + already_processed.add("consigneeAccount") + outfile.write( + " consigneeAccount=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.consigneeAccount), + input_name="consigneeAccount", + ) + ), + ) + ) + if self.sedNumber is not None and "sedNumber" not in already_processed: + already_processed.add("sedNumber") + outfile.write( + " sedNumber=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.sedNumber), input_name="sedNumber" + ) + ), + ) + ) + if self.dutiable is not None and "dutiable" not in already_processed: + already_processed.add("dutiable") + outfile.write( + " dutiable=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dutiable), input_name="dutiable" + ) + ), + ) + ) + if self.billTo is not None and "billTo" not in already_processed: + already_processed.add("billTo") + outfile.write( + " billTo=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.billTo), input_name="billTo" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DutiesTaxesType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = 
gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("consigneeAccount", node) + if value is not None and "consigneeAccount" not in already_processed: + already_processed.add("consigneeAccount") + self.consigneeAccount = value + value = find_attr_value_("sedNumber", node) + if value is not None and "sedNumber" not in already_processed: + already_processed.add("sedNumber") + self.sedNumber = value + value = find_attr_value_("dutiable", node) + if value is not None and "dutiable" not in already_processed: + already_processed.add("dutiable") + self.dutiable = value + value = find_attr_value_("billTo", node) + if value is not None and "billTo" not in already_processed: + already_processed.add("billTo") + self.billTo = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class DutiesTaxesType + + +class InBondManifestType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + + def __init__( + self, + locationOfGoods=None, + nameOfCarrier=None, + vehicleIdentification=None, + customsClearedBy=None, + handlingInfo=None, + previousCargoControlNum=None, + weight=None, + weightUOM=None, + valueOf_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.locationOfGoods = _cast(None, locationOfGoods) + self.locationOfGoods_nsprefix_ = None + self.nameOfCarrier = _cast(None, nameOfCarrier) + self.nameOfCarrier_nsprefix_ = None + self.vehicleIdentification = _cast(int, vehicleIdentification) + self.vehicleIdentification_nsprefix_ = None + self.customsClearedBy = _cast(None, customsClearedBy) + self.customsClearedBy_nsprefix_ = None + self.handlingInfo = _cast(None, handlingInfo) + self.handlingInfo_nsprefix_ = None + self.previousCargoControlNum = _cast(int, previousCargoControlNum) + self.previousCargoControlNum_nsprefix_ = None + self.weight = _cast(float, weight) + self.weight_nsprefix_ = None + self.weightUOM = _cast(None, weightUOM) + self.weightUOM_nsprefix_ = None + self.valueOf_ = valueOf_ + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, InBondManifestType + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if InBondManifestType.subclass: + return InBondManifestType.subclass(*args_, **kwargs_) + else: + return InBondManifestType(*args_, **kwargs_) + + factory = staticmethod(factory) + + def get_ns_prefix_(self): + return self.ns_prefix_ + + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + + def get_locationOfGoods(self): + return self.locationOfGoods + + def set_locationOfGoods(self, locationOfGoods): + self.locationOfGoods = locationOfGoods + + def get_nameOfCarrier(self): + return self.nameOfCarrier + + def set_nameOfCarrier(self, nameOfCarrier): + self.nameOfCarrier = nameOfCarrier + + def get_vehicleIdentification(self): + return 
self.vehicleIdentification + + def set_vehicleIdentification(self, vehicleIdentification): + self.vehicleIdentification = vehicleIdentification + + def get_customsClearedBy(self): + return self.customsClearedBy + + def set_customsClearedBy(self, customsClearedBy): + self.customsClearedBy = customsClearedBy + + def get_handlingInfo(self): + return self.handlingInfo + + def set_handlingInfo(self, handlingInfo): + self.handlingInfo = handlingInfo + + def get_previousCargoControlNum(self): + return self.previousCargoControlNum + + def set_previousCargoControlNum(self, previousCargoControlNum): + self.previousCargoControlNum = previousCargoControlNum + + def get_weight(self): + return self.weight + + def set_weight(self, weight): + self.weight = weight + + def get_weightUOM(self): + return self.weightUOM + + def set_weightUOM(self, weightUOM): + self.weightUOM = weightUOM + + def get_valueOf_(self): + return self.valueOf_ + + def set_valueOf_(self, valueOf_): + self.valueOf_ = valueOf_ + + def _hasContent(self): + if 1 if type(self.valueOf_) in [int, float] else self.valueOf_: + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InBondManifestType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InBondManifestType") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "InBondManifestType": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InBondManifestType", + ) + if self._hasContent(): + outfile.write(">") + outfile.write(self.convert_unicode(self.valueOf_)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InBondManifestType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InBondManifestType", + ): + if ( + self.locationOfGoods is not None + and "locationOfGoods" not in already_processed + ): + already_processed.add("locationOfGoods") + outfile.write( + " locationOfGoods=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.locationOfGoods), + input_name="locationOfGoods", + ) + ), + ) + ) + if self.nameOfCarrier is not None and "nameOfCarrier" not in already_processed: + already_processed.add("nameOfCarrier") + outfile.write( + " nameOfCarrier=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.nameOfCarrier), input_name="nameOfCarrier" + ) + ), + ) + ) + if ( + self.vehicleIdentification is not None + and "vehicleIdentification" not in already_processed + ): + already_processed.add("vehicleIdentification") + outfile.write( + ' vehicleIdentification="%s"' + % self.gds_format_integer( + self.vehicleIdentification, input_name="vehicleIdentification" + ) + ) + if ( + self.customsClearedBy is not None + and "customsClearedBy" not in already_processed + ): + already_processed.add("customsClearedBy") + outfile.write( + " customsClearedBy=%s" + % ( 
+ self.gds_encode( + self.gds_format_string( + quote_attrib(self.customsClearedBy), + input_name="customsClearedBy", + ) + ), + ) + ) + if self.handlingInfo is not None and "handlingInfo" not in already_processed: + already_processed.add("handlingInfo") + outfile.write( + " handlingInfo=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.handlingInfo), input_name="handlingInfo" + ) + ), + ) + ) + if ( + self.previousCargoControlNum is not None + and "previousCargoControlNum" not in already_processed + ): + already_processed.add("previousCargoControlNum") + outfile.write( + ' previousCargoControlNum="%s"' + % self.gds_format_integer( + self.previousCargoControlNum, input_name="previousCargoControlNum" + ) + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + if self.weightUOM is not None and "weightUOM" not in already_processed: + already_processed.add("weightUOM") + outfile.write( + " weightUOM=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.weightUOM), input_name="weightUOM" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InBondManifestType", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("locationOfGoods", node) + if value is not None and "locationOfGoods" not in already_processed: + already_processed.add("locationOfGoods") + self.locationOfGoods = value + value = find_attr_value_("nameOfCarrier", node) + if value is not None and "nameOfCarrier" not in already_processed: + already_processed.add("nameOfCarrier") + self.nameOfCarrier = value + value = find_attr_value_("vehicleIdentification", node) + if value is not None and "vehicleIdentification" not in already_processed: + already_processed.add("vehicleIdentification") + self.vehicleIdentification = self.gds_parse_integer( + value, node, "vehicleIdentification" + ) + value = find_attr_value_("customsClearedBy", node) + if value is not None and "customsClearedBy" not in already_processed: + already_processed.add("customsClearedBy") + self.customsClearedBy = value + value = find_attr_value_("handlingInfo", node) + if value is not None and "handlingInfo" not in already_processed: + already_processed.add("handlingInfo") + self.handlingInfo = value + value = find_attr_value_("previousCargoControlNum", node) + if value is not None and "previousCargoControlNum" not in already_processed: + already_processed.add("previousCargoControlNum") + self.previousCargoControlNum = self.gds_parse_integer( + value, node, "previousCargoControlNum" + ) + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") + value = self.gds_parse_float(value, node, "weight") + self.weight = value + value = find_attr_value_("weightUOM", node) + if value is 
not None and "weightUOM" not in already_processed: + already_processed.add("weightUOM") + self.weightUOM = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class InBondManifestType + + +GDSClassesMapping = {} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + """ + parser = None + rootNode = parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = "EShipper" + rootClass = EShipper + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write("#from shipping_request import *\n\n") + sys.stdout.write("import shipping_request as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == "__main__": + # import pdb; pdb.set_trace() + main() + +RenameMappings_ = {} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
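The parse helpers above (parse, parseString, parseLiteral) and the generated classes' export() methods are the intended entry points into this generateDS module. The following is a minimal usage sketch, not part of the generated file: it assumes the module is importable under the package path this patch adds (karrio.schemas.eshipper_xml.shipping_request) and that the generateDS support preamble earlier in the file is intact; the element name "Contact" is illustrative only.

import io
from karrio.schemas.eshipper_xml import shipping_request as shipping

# Build one of the generated types and serialize it back to XML.
contact = shipping.ContactType(name="Riz", phone="9052223333")
buf = io.StringIO()
contact.export(buf, 0, name_="Contact")
print(buf.getvalue())  # expected to resemble: <Contact name="Riz" phone="9052223333"/>

# parseString() rebuilds an object tree from raw XML; unknown root tags
# fall back to the EShipper root class, as in get_root_tag() above.
rebuilt = shipping.parseString(buf.getvalue(), silence=True)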
+NamespaceToDefMappings_ = {"http://www.eshipper.net/XMLSchema": []} + +__all__ = [ + "BillToType", + "CODReturnAddressType", + "CODType", + "ContactType", + "CustomsInvoiceType", + "DutiesTaxesType", + "EShipper", + "FromType", + "InBondManifestType", + "ItemType", + "PackageType", + "PackagesType", + "PaymentType", + "PickupType", + "ReferenceType", + "ShipperType", + "ShippingRequestType", + "ToType", +] diff --git a/modules/connectors/eshipper_xml/setup.py b/modules/connectors/eshipper_xml/setup.py new file mode 100644 index 0000000000..31d921367b --- /dev/null +++ b/modules/connectors/eshipper_xml/setup.py @@ -0,0 +1,25 @@ +from setuptools import setup, find_namespace_packages + +with open("README.md", "r") as fh: + long_description = fh.read() + +setup( + name="karrio.eshipper_xml", + version="2023.5", + description="Karrio - eShipper XML Shipping extension", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/karrioapi/karrio", + author="karrio", + author_email="hello@karrio.io", + license="Apache-2.0", + packages=find_namespace_packages(exclude=["tests.*", "tests"]), + install_requires=["karrio"], + classifiers=[ + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + ], + zip_safe=False, + include_package_data=True, +) diff --git a/modules/connectors/eshipper_xml/tests/__init__.py b/modules/connectors/eshipper_xml/tests/__init__.py new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/modules/connectors/eshipper_xml/tests/__init__.py @@ -0,0 +1 @@ + diff --git a/modules/connectors/eshipper_xml/tests/eshipper_xml/__init__.py b/modules/connectors/eshipper_xml/tests/eshipper_xml/__init__.py new file mode 100644 index 0000000000..73fb9f9e95 --- /dev/null +++ b/modules/connectors/eshipper_xml/tests/eshipper_xml/__init__.py @@ -0,0 +1,2 @@ +from .test_shipment import * +from .test_rate import * diff --git a/modules/connectors/eshipper_xml/tests/eshipper_xml/fixture.py b/modules/connectors/eshipper_xml/tests/eshipper_xml/fixture.py new file mode 100644 index 0000000000..66d43b205e --- /dev/null +++ b/modules/connectors/eshipper_xml/tests/eshipper_xml/fixture.py @@ -0,0 +1,8 @@ +import karrio + +gateway = karrio.gateway["eshipper_xml"].create( + dict( + username="username", + password="password", + ) +) diff --git a/modules/connectors/eshipper_xml/tests/eshipper_xml/test_rate.py b/modules/connectors/eshipper_xml/tests/eshipper_xml/test_rate.py new file mode 100644 index 0000000000..374edaf0ba --- /dev/null +++ b/modules/connectors/eshipper_xml/tests/eshipper_xml/test_rate.py @@ -0,0 +1,206 @@ +import unittest +from unittest.mock import patch +from karrio.core.utils import DP +from karrio.core.models import RateRequest +from karrio.core.errors import FieldError +from karrio import Rating +from .fixture import gateway + + +class TestEShipperRating(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.RateRequest = RateRequest(**RatePayload) + + def test_create_rate_request(self): + request = gateway.mapper.create_rate_request(self.RateRequest) + + self.assertEqual(request.serialize(), RateRequestXML) + + def test_create_rate_request_from_package_preset_missing_weight(self): + with self.assertRaises(FieldError): + gateway.mapper.create_rate_request( + RateRequest(**RateWithPresetMissingDimensionPayload) + ) + + @patch("karrio.mappers.eshipper_xml.proxy.http", return_value="") + def test_get_rates(self, http_mock): + 
Rating.fetch(self.RateRequest).from_(gateway) + + url = http_mock.call_args[1]["url"] + self.assertEqual(url, gateway.proxy.settings.server_url) + + def test_parse_rate_response(self): + with patch("karrio.mappers.eshipper_xml.proxy.http") as mock: + mock.return_value = RateResponseXml + parsed_response = Rating.fetch(self.RateRequest).from_(gateway).parse() + + self.assertListEqual(DP.to_dict(parsed_response), ParsedQuoteResponse) + + +if __name__ == "__main__": + unittest.main() + +RatePayload = { + "shipper": { + "company_name": "Test Company", + "address_line1": "650 CIT Drive", + "city": "Livingston", + "postal_code": "L8E5X9", + "country_code": "CA", + "person_name": "Riz", + "state_code": "ON", + "phone_number": "9052223333", + "residential": "true", + "email": "riz@shaw.ca", + }, + "recipient": { + "company_name": "Test Company", + "address_line1": "650 CIT Drive", + "city": "Livingston", + "postal_code": "V3N4R3", + "person_name": "RizTo", + "country_code": "CA", + "state_code": "BC", + "phone_number": "4162223333", + "email": "riz@shaw.ca", + }, + "parcels": [ + { + "height": 9, + "length": 6, + "width": 12, + "weight": 2.0, + "weight_unit": "KG", + "dimension_unit": "CM", + "description": "desc.", + "packaging_type": "eshipper_pallet", + } + ], + "options": { + "eshipper_inside_delivery": True, + "freight_class": "eshipper_freight_class_70", + "cash_on_delivery": 10.5, + "insurance": 70.0, + }, +} + +RateWithPresetMissingDimensionPayload = { + "shipper": {"postal_code": "H8Z2Z3", "country_code": "CA"}, + "recipient": {"postal_code": "H8Z2V4", "country_code": "CA"}, + "parcels": [{}], +} + +ParsedQuoteResponse = [ + [ + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "currency": "CAD", + "extra_charges": [ + {"amount": 177.0, "currency": "CAD", "name": "Base charge"} + ], + "meta": {"rate_provider": "purolator", "service_name": "purolator_air"}, + "service": "eshipper_purolator_air", + "total_charge": 177.0, + "transit_days": 1, + }, + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "currency": "CAD", + "extra_charges": [ + {"amount": 28.65, "currency": "CAD", "name": "Base charge"} + ], + "meta": {"rate_provider": "purolator", "service_name": "purolator_ground"}, + "service": "eshipper_purolator_ground", + "total_charge": 28.65, + "transit_days": 1, + }, + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "currency": "CAD", + "extra_charges": [ + {"amount": 46.27, "currency": "CAD", "name": "Base charge"}, + {"amount": 6.25, "currency": "CAD", "name": "Fuel surcharge"}, + ], + "meta": {"rate_provider": "fedex", "service_name": "fedex_priority"}, + "service": "eshipper_fedex_priority", + "total_charge": 52.52, + "transit_days": 0, + }, + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "currency": "CAD", + "extra_charges": [ + {"amount": 30.74, "currency": "CAD", "name": "Base charge"}, + {"amount": 1.08, "currency": "CAD", "name": "Other"}, + ], + "meta": {"rate_provider": "fedex", "service_name": "fedex_ground"}, + "service": "eshipper_fedex_ground", + "total_charge": 31.82, + "transit_days": 0, + }, + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "currency": "CAD", + "extra_charges": [ + {"amount": 300.0, "currency": "CAD", "name": "Base charge"}, + {"amount": 36.0, "currency": "CAD", "name": "Fuel surcharge"}, + ], + "meta": { + "rate_provider": "canada_worldwide", + "service_name": "canada_worldwide_air_freight", + }, + "service": 
"eshipper_canada_worldwide_air_freight", + "total_charge": 336.0, + "transit_days": 0, + }, + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "currency": "CAD", + "extra_charges": [ + {"amount": 165.0, "currency": "CAD", "name": "Base charge"}, + {"amount": 19.8, "currency": "CAD", "name": "Fuel surcharge"}, + ], + "meta": { + "rate_provider": "canada_worldwide", + "service_name": "canada_worldwide_next_flight_out", + }, + "service": "eshipper_canada_worldwide_next_flight_out", + "total_charge": 184.8, + "transit_days": 0, + }, + ], + [], +] + +RateRequestXML = f""" + + + + + + + + +""" + +RateResponseXml = """ + + + + + + + + + + + + +""" diff --git a/modules/connectors/eshipper_xml/tests/eshipper_xml/test_shipment.py b/modules/connectors/eshipper_xml/tests/eshipper_xml/test_shipment.py new file mode 100644 index 0000000000..0fb238376e --- /dev/null +++ b/modules/connectors/eshipper_xml/tests/eshipper_xml/test_shipment.py @@ -0,0 +1,233 @@ +import unittest +from unittest.mock import patch, ANY +import karrio +from karrio.core.utils import DP +from karrio.core.models import ShipmentRequest, ShipmentCancelRequest +from .fixture import gateway + + +class TestEShipperShipment(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.ShipmentRequest = ShipmentRequest(**shipment_data) + self.ShipmentCancelRequest = ShipmentCancelRequest(**shipment_cancel_data) + + def test_create_shipment_request(self): + request = gateway.mapper.create_shipment_request(self.ShipmentRequest) + + self.assertEqual(request.serialize(), ShipmentRequestXML) + + def test_create_cancel_shipment_request(self): + request = gateway.mapper.create_cancel_shipment_request( + self.ShipmentCancelRequest + ) + + self.assertEqual(request.serialize(), ShipmentCancelRequestXML) + + def test_create_shipment(self): + with patch("karrio.mappers.eshipper_xml.proxy.http") as mock: + mock.return_value = "" + karrio.Shipment.create(self.ShipmentRequest).from_(gateway) + + url = mock.call_args[1]["url"] + self.assertEqual(url, gateway.settings.server_url) + + def test_cancel_shipment(self): + with patch("karrio.mappers.eshipper_xml.proxy.http") as mock: + mock.return_value = "" + karrio.Shipment.cancel(self.ShipmentCancelRequest).from_(gateway) + + url = mock.call_args[1]["url"] + self.assertEqual(url, gateway.settings.server_url) + + def test_parse_shipment_response(self): + with patch("karrio.mappers.eshipper_xml.proxy.http") as mock: + mock.return_value = ShipmentResponseXML + parsed_response = ( + karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() + ) + + self.assertListEqual(DP.to_dict(parsed_response), ParsedShipmentResponse) + + def test_parse_cancel_shipment_response(self): + with patch("karrio.mappers.eshipper_xml.proxy.http") as mock: + mock.return_value = ShipmentCancelResponseXML + parsed_response = ( + karrio.Shipment.cancel(self.ShipmentCancelRequest) + .from_(gateway) + .parse() + ) + + self.assertListEqual( + DP.to_dict(parsed_response), ParsedCancelShipmentResponse + ) + + +if __name__ == "__main__": + unittest.main() + +shipment_cancel_data = {"shipment_identifier": "383363"} + +shipment_data = { + "shipper": { + "company_name": "Test Company", + "address_line1": "650 CIT Drive", + "city": "Livingston", + "postal_code": "L8E5X9", + "country_code": "CA", + "person_name": "Riz", + "state_code": "ON", + "phone_number": "9052223333", + "residential": "true", + "email": "riz@shaw.ca", + }, + "recipient": { + "company_name": "Test Company", + "address_line1": "650 CIT Drive", + 
"city": "Livingston", + "postal_code": "V3N4R3", + "person_name": "RizTo", + "country_code": "CA", + "state_code": "BC", + "phone_number": "4162223333", + "email": "riz@shaw.ca", + }, + "parcels": [ + { + "height": 9, + "length": 6, + "width": 12, + "weight": 2.0, + "weight_unit": "KG", + "dimension_unit": "CM", + "description": "desc.", + "packaging_type": "eshipper_pallet", + "freight_class": "70", + "options": {"insurance": 100}, + } + ], + "service": "eshipper_fedex_ground", + "options": { + "cash_on_delivery": 10.5, + "insurance": 70.0, + }, + "customs": { + "duty": {"paid_by": "recipient"}, + "commodities": [ + { + "sku": "098765", + "hs_code": "1234", + "title": "Laptop computer", + "origin_country": "US", + "quantity": 100, + "value_amount": 1000.00, + } + ], + }, + "payment": {"paid_by": "third_party"}, +} + +ParsedShipmentResponse = [ + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "docs": {"label": ANY, "invoice": ANY}, + "meta": { + "rate_provider": "fedex", + "service_name": "fedex_ground", + "tracking_url": "http://www.fedex.com/Tracking?tracknumbers=052800410000484", + }, + "selected_rate": { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "currency": "CAD", + "extra_charges": [ + {"amount": 30.74, "currency": "CAD", "name": "Base charge"}, + {"amount": 1.08, "currency": "CAD", "name": "Other"}, + ], + "meta": {"rate_provider": "fedex", "service_name": "fedex_ground"}, + "service": "eshipper_fedex_ground", + "total_charge": 31.82, + "transit_days": 0, + }, + "shipment_identifier": "181004", + "tracking_number": "052800410000484", + }, + [], +] + +ParsedCancelShipmentResponse = [ + { + "carrier_id": "eshipper_xml", + "carrier_name": "eshipper_xml", + "operation": "Cancel Shipment", + "success": True, + }, + [], +] + + +ShipmentRequestXML = """ + + + + + + + + + + + + + + + + + + +""" + +ShipmentResponseXML = """ + + + + + + + + + http://www.fedex.com/Tracking?tracknumbers=052800410000484 + [base-64 encoded String] + + + + + [base-64 encoded String] + + + + + +""" + +ShipmentCancelRequestXML = """ + + + + +""" + +ShipmentCancelResponseXML = """ + + + + + +""" diff --git a/modules/connectors/eshipper_xml/vendor/documentation/eShipper API v3.2.1.pdf b/modules/connectors/eshipper_xml/vendor/documentation/eShipper API v3.2.1.pdf new file mode 100644 index 0000000000000000000000000000000000000000..22221a9c904d5ecdc6b9f2039fc49658c7077c71 GIT binary patch literal 336078 zcma&Nb6{o7@-7@qY}vt_~1ck*sW9VY~kDo9|ifHoygc$*xf+Flf!pw{;92}g2qKtx^oGgsYoUEUB0nB_n zEJDI89PFaPoB&1+4gd?QAd@f)8z+aLAb_2njY)`&PnSW`&cxJ1`%lkV+5Sjo_JmAq zoS!BtG02$OnY&ngngIA?klR0c$;O~$Z}0Ny1*X5=QT}ugAppR}pyKIZ%AlcSYQ~_h z%|ysd$n@y}XBQ_^Lt8j#iwz?q69Ys810w?iq%VoEa)e1tI3o4@z==N!43HSd)PBp6>(w>8kOKg#Hxpp0K8-cFCjG%&f?pU%qi7A zvDHH13*2myfPs_e!_f8F85`iv@A-q5yp;k0g9H7NK}vvw{v3pVn(@!!aCR|ta`A97 zHG_j@go7p{6O|W(gZ{T6`s*tkGyuT*uW|aj0U%^&)IRB&11_lNo z-#Eb42$u&52LphFU}*&u(ZS>3U}2#R62E|}^a7IyzI6EG)qc%S?;{y|T32rfxP(Gs zDD(+#3hoDsGZAYEOF^(x=lEcLW}U~d$gFK*(s7c|%RlPt%Dh$*&I?w8GY2Gt37ZBQ zfm84gc!e)PfzW8wFp}d!q9?TWtqeW|5D5$Fgn6SBdJAU;wnF0&>|TP-VC>0Y#xs6^ zXmu0+B%>Rm`w0eC)Y?|uRxGeKTGV>u6Mc(KH(^Z-80qrJPH*SNU!hGt_9$_n>ewze z(}Wv<0ke)1|AIO}&Xr5038Vr-B(mNYWw-`5f+w6{!qf_B8%Ly&tO+Ve&K;K0Tt9Gb z0QZG+Aj_I+S`WNACJr8)iBRp-K;qP3!eBI@wimY8 zj)j2BywNP?_{C^vpt-eBdpUXlc~Ja-E}9jZ2Y>`G#Y4=>9qlzhHEagWM7GA#DApDz z0lIb#00ITlIG&{bo4N9jS^iH#kTrENG%<8B`~(E^Uvpo<(A?Dd6D=HnNq+%A`184q zz4^bu|H&5rnnuF*b}puNF3z8D`$GaspD1Gg=k>2PKIOlVN65tShloC(bNq#}&nwG6 z7KpaAAJO;n4LB}^mL4{}1uf$3opS~?7_ 
zlQ123m63IY?*8_C#7mz|_?5ns&~}*@@>_4Ls9@acOvIw*{EUzn2=p=F5vy*o@smKLlhcA5Xn7)a)Q{`Do#(t;}*}0uKn`z6} z&)4a<(dxn1viG)n_i|?J$=k^6o64ybT%-)9YW%#pdYdWVN>v`3apk9V@!QXwG&lT^ z066QLsblfGdFt*H%RH|uE)%T@FF!y>RQ;~^`>yV8`~um+=X%T{Y2mTt#T)p;N-=gj zY*>evk5g-B`CrVwCFs)o`)NB#IP7EN=K4VbI>xCMSD+9Tz9(p-!QeTe2njN|??rDp zOJgJG+I^#r(6M9P$jq?bg?8$u@9J~bS5PQpQ93}CHAI!#jkZ906IMLq1nN?K^g4ut zN@gNQv!I53YHY71qNqK0v1_E&84YdR>Dj{(~iE-0Lk zT+!n2kTX*i65tXNc%##XiM-_9$H$^xFl3?%LM?lUWb+M2G(y&hXzCqB5A!KEh7AJ> zVnIJAj>L&z&`1X-@)!(KjM6%wKcZuD`*~RtY5|Y9tDHKJTtdG9>9!~;t< z)9IJxYQ(s(NHfyHQ*(Qp|IFR3skB7_T_Zwa-}53r0ZC6;WolZlRB3ixT!yw3BUXvn znW1PMC&Rj6ql>!)wJ~SO_m;b(hgsv5l6)haZx;qFV^HA<#=m>^M68l;4^B?A;`MVw zvs0u9?@9e*;j&eKzO0Pq%>hpkoZ7);UtszXa7bN1#VM_Bxj=M)#m$CSzE6F1%8aFW z(m3aavR3T6Wa(4*{QB50)?D}ObMt)n-cgOWT_8y@lB8XuBDO%2s@Q*oO+!u(^^$KgA7j((q)}GubbyVMf^OBZ`w`pv)z`6Mwe=j{1BcTc$SKf`1moL^ zK-?vMf^sz|=c!PhO-@Ayr}}MpZ0%lw<9n~YaCEbxt2X^$$Bq59r5kr{b9&oS$%W0` z^{&Gcbz3h9r%!7qCLU1E5GS!w&YTf~HFG0#)MkXj5=KF5?DV8Z3*>A#u13&?tU{v~ zvt?7<1U=iAL_YzE5{%9eF*kbr13zWpoZBwzdNTWiJQMlHB~u?nLQHdQT$(xN8WOo< z1u(a+FWpN|csMtCgdfQRo=;~4;Sco7$ z(F$6@_Et{@yQfdQD+7sIt-T*h1>PEs9^(B*hf^Jh)@{0t$DqIUla1s|Q9uQsz$-F` zT3`teklS#5#VWh~;*2?8;0Qj_Ar=J{b>lN%2DPDRYGZpNzjYsKbk>y-tig-U_nnv} zcq%Q&+K26&^8Swy3rQJs zbpZ*W=$YUFj|!Avw80#xwWM#-9JpHIpvO=FF`8@LgpJ#mj4hdadWC|Nq?2Z}S%MD0 z4r?y1V=*h7gobaR*7od375qhYQJ#q}kFoIbQJKk*7EG|)wj?*uF6U}LA`55mnt*Z0 zKQTis$oAZtN$XELyO~5q^_!*_=;H3h;BPl1e^$@`|26KPD;q0-?O*GdN49Oss$G@< zV%Vi;RF{mZ=vhC$Kz#y-=8fn|xg7z}z=}{5aMg9_CLiPPSxQUV<(|Xavma-$xwSkh z;RSh`PZP1{Gm~d)2ssTc*B4q7au2fzfe{*uEe$P44P9)?i1gv{@vAm9;oCg*>v5qy z=>+x--nFHjUYrL*kM*AC^40!)KPJ*2aA5q~C)@p6YfF3f9qZyBExnr5xqrko&>H9P zG6d>p9*a9S(Y^tGbj;%0_X1T1T^n@>zW7RwafQJqmIZO zy8rgL-z43^5`=swcahKWSIs7^3U&d3)7XMlEJJPN@G?Z9zajP75D zL-tgJg0so`@jDqg8~ZZUnp(!eDhUxcA&giAKEwVVsS?-`Og1z!V*IY36^$Gf?6$uN zSVU$MJqJiF0F6*q$^yFtqZ&qF9%?(2oGgae;>zR*8j&eBUNRXOiggTz9k&)pgg7{? zh!VfvEt@bZ+#xs&brgOZ9FEGd^KN7&K?!t+lZk+Wj30qoZC;jvr+g8UX=U5gw^==- zpZPvFg|2$LO0V_erHY^V4dO5E0_aJJ%?#BYeo`tXUUQXjy5rfPm#36cbOmK#}Q! 
z$X=hhgia;m%J?nI&?Nn$pn<%8amyqgsj-fF>#SOk7?uNDTZNU9WS4wppVX&MsagLZ zLfO2DW(9pon|hLowzfmvu9w#v3Ocxs(QT~(Uy;he%>FN@N<>zwNn7JEA$DKZ zEIAvfb`c<=8;$VYsaYKrq~On%ObZi^{SKd$z^-Cju9Kx47W-A|X>sRE!i%6qwmkyW zBNsmwoj(Hgp~2n#hTpv&BR_D#;k<t{gWGgEcZo(w3;UbbV+2 zkATj{CwUcP^xb1-54LcPaD2pje21)E&39=U^rJ%(3PG`t=R%W8LB$1j< z>kgAh#acbbpTIz zs2E8bUnLm^Jb4T52TzC&Oh4Y4Le2B*^dmi$sv&VUBrZ ztAlNX+>2pS-?}Rv($EOosFHqNHjolU1b=1cxqW?WF(Y0tum)c)q`58`*F-}BJI(_B zp>~+k3_RE^$=H$d+>sbuxg0vLmDcE14JVliisjN=5z--HM+F8BXGhgMU(vnG16h=N zh-Kh~^^_d7pOhy~6aEMN0pNX#-ff>m}1{1C*VIz>q}b zLv}sPcyv}Bxi3aWItLDq`T&E}-DxYX*1MBKvg7E!>NII;kJj+U4w^6kUOPyWrmn+f zwen0gMBTKFATT>eB8z(N8~-#hRTVV%z0PG@)i)vJ1zkOHyL!v4y=7UKi?qx_Pw_*T znVJ&jpF59T^&}x1Es+7o5VLa|Ol*@!HSn!?}4@Ck8XNXjMqvQ;~H} z7p9@UJvOf3u5;(MBXsh#cv#i=dJ*ICyk-bRP#mS<)y?I3$}j<2KHB8%8#qmDSIuSm zlu?z6?rlt6j#a!u%>6dJY};5~RrQ)w_Dwaha;wR%UH;{WRBnL#8;TJq7->ECZ?X6% z82ztUWM^jjcP#Fu{e{Jm%U3EF_{#D}&>uqQHt=1k*j|tY;f+*xhz>XJ1E zn%lwp^rZC46N|KBMR!!&B0J0TxV|@s!+kD5XKXee4@hCwp{ZTrvn!6%Jj1OhN^wHN zQ^D)9`dg%=u;=G2yX!n8VSK$yMLCI{TK$ay3Km^0!9NEadLO6N8yum{8p6LYL+l3E zkYlR!DVQb$9xTb6^s;cn!%HBx;)k`8V|f#Ntz@eo1&lMjwjL@Wsw)kg5fCegy?_9M zLgf%iQjb8}x?Y51E(i%@(yY{j7d+SHYa)6JRNc0WI_+rPwnp5bct%5zVZ1hKurO=3 zLhC;j(A7dS&kwsTXhW7sn#VwJ1WM*d`R+gf+I9WE^ACRK7lwR`NUL!?faz5TT*kaU zUHAE!y3&tfb|1GxQ$ebq;(kCgL8|X^qXc&nL$*(TQ!aP2Kc0LHrynpB5||6}4rRcxpJ4uJ~>ebS@7V(fV$ zsd3po)tr#EjN?b~a9l?_wL=Vf2-I7&y*v$e^GfZ`o-Ts>cFQb<78D576UkylicXjl z$r441E-$SIttM#+qt_Zw4;KE|v3qBB-42HJD&rut30Wr z6a~Q$L8Z&fRdj8{Fumdyc~&0T==v6*f7eNwDWA(9)pdi>g>PstAz@x2k8=ZcJ|y8o zq|$eJv&=Jnz53!LH&fxf;@u=Xr2y`FQz0NT5Z_iWBth*_U0D!!xyZVj(moup$Uj4c z4RpoPfge3I!jI9#SmO0pvNcSBEzS*@k`Q^U8~Ma4F_Mw$KD1h ze>RElZ79D6f%5sZ2pR%)@mKqNP;*sRd32lN{CMaE*nQ1b8@6J<)zNEjmeanca*ylq z7xF8j-4JcLwzbV`lP>_}5ZM%OY;M*TQXQia`K9maYdwA_H(E`DFtTLSW$q0a_T69X z;i3RYDN(gClpslMBICEkt_Anz)Jd|P57X@_;zE(DM%w#leJoZbNViiJSFnOtz(tvU5=0NvzazK2j_*Ei;*J-sWtUszg@QID?Y3D^a8@|eYFj#?AR3kZ#3v;>oE!7Zd9y^lBBPAG@HdO|`e&gp)FPQ7VIqy>JsV62cWcqnN zfU*Xm;N~7EVnuK=Hqdm-QcqxxnDAwcL-d#NLCv4w2-@#uZ8%j!blbX;SH*D>kJPUDUk_ zq7~{TvV<@KNq-~Yb=fR0I-e(ry7(Rx&v0TxU3<-C$W2T_&3Rj<9#y%BH4?_65cJ^m zSH%eolPC=M2KAaicCnBf6WyV;Ms-H^u=_H+%3+?8Q5Ud$RYB*s8`O68x9jHlZjQPn z>9F&raKL7i&Cs6u36Ro1s>l94{scyGEsz ztFVMQaDorgsYYs_P*+O9sO{a~zBOS@{bWC>1*dSda%AXQxpcUeq?2>a`CSrW0!RGj z`&O{)KCABLg)5jxxZ2G=tXZ~ZcX3Im$0r&^&8NTdYd?fBXXxCEZ4K0z%Cu!@zg8~) zfpDxoj5(*pH#&xRydpwcr@c2SBDkVAi-QKSSU6Ub#JXOcrIB&Q{GeSIkbyc+t!fb! 
zB@;OZDP4hyX@wS=3~d67cG4ykJxke+gg(-?61q^o$hXX+I-5Ht7gQAjnz9g9T;_HX z-ki6{_BG`-{{*;y`T&RwDcelnDT7^fyGhQ9K8F$wY!R32vEL+i@*AlnHLSR$iwXh> zOjULxe1-9zD<)Sdb(e{zTCU_*kAfc+WN+bsuf$l6$c2)U*<2{&6Uo=~FEWe_3T&Ps z)S<5Fo-NbV7w9}cIbwtr%BI^3nc-bHzS0`<$_?~Q8dI!2vFMlMY=+}dmKLU+QAoop zSMMWrvqL(*QW1XRZVWtAvCsd};ptOinLTWpsUyjbz$l%8WXVXDZk4MaR+CP3$0Ijj zuhS@-NF7W%%NGwfzB6!VXRW4IBAId`l){aMi%zDDMk2MB>#vvswJ$p6fuXoFxtc7i z*WMQ=3tvsF@9S-KS>y~tWc85G*fh7Tk4_F3$f3_HA88S~un-(~16!r*1LB$q)#f$0 zzB)u&&@@rdp^H}A3 z>Y+Y}CN*>S*D)y5$RFf!kb1C?*YW|tK(}x+CVz*-zm8r0I}+Lc6N%}6?GpnoUuj(6 zHSoH~AYrbD|M#FRE|*(NyG~j{t)tLWBGX>X@a)-cR?Trt1yV->T*tc^B~W{6!%v=) zjoRTh@XggJj(Z~ImS+o;kZ%XIYB$B+{mgVFuS7ojjVhxSmxr5Io$T(MmHR!jdlNKc zpwa_!8$Tq#lfuEq(FWfvSQv$$;0iVAXeP&ZX#Z28*@8yeCxs?tA8c2Yq%qr&OBtBuqfYn z(n#_XhU5LGm?E&oo9AmVRl0MijA}T^caoIjn&gMDiBlvWu!pP(|Xfm zYcb_hNKa!-;c1R}9F4QtN)+mslUfwphdM0Bc(_s<2As$x43cf9eJUjGXcS-@qpW%g z-obY|b4MI+)l$46*N4&hAGsXHmr`HY8`vYPdUGY)xQwP!>{yKK^L3Xec9^{X*f@!^ zb@KErL!+D0{)}phSM0k@{Nc!%BpSitI|@oQTtP9>LALH~N^?tQ!zy~JmgJ(a#3h}I z`F3XooVo3{kRjXB6zMZVJS0eU>)!~x{%971Yt>te%iYX)+r_M<;dmH1vNNe}jp?TI z0fo1XvP<@h&DuVdnIUebK8yXQ3Qu1fk94G+fT;>*jh4K+3Y!FBeim1^IceK|=b}@s zUIYiQE`oci;Z%z_INM^1uzrUdO{yPQNhsc3VVtgRB8nbqGCbJLW0(G42(OaECApL;4+E!ptl@6kVDHo$p#s1)L;7)utn0F}UFQ!1 z3j~2QJNvgd{d1o9UvbLL_8+;G6gBDSRYt__SM{S#166xzL|9FGr=u#C&&?{YO-Cp` zdj0F~0%k8!Hy-QsT21lIPL>b8SWIq6qC1c0)g1cu^o#3RsIa^i{v5|#h>^zWH5|9+ z1-uZxoKQZDTWj;q{c&cx!u{T0qb}Gr%!h}L*N0v`rmfQB3WKRjS}?QAshZj%KPhrN zj9gtkBTVysT2<`@sSrWIUm21PyP5<&M8^}L0#DhBcwZn^(b?skS8x>7*CC?8npzhE zSL2h??MNYitO1yS7e~MNoreV?#FcHenJbAjG`UTAW%>H7p}9P8H>$POsq=jCN1T-x z8c$Amu%wk%i`k+h<--?;sX)gcsH@zmP`vtXpb8C}n!D~PB3w}Jjd)$-3WOR9PT4n* z)Q358J+klYwXi7X6|ODc5mMgLqQP^5e|Q#{AONB$L_Kw$o4;IYt1}1631`f3+koJz znY+?O>5)FUH(11D<{1rB_Z2O$xkNJGjA}X-6KIkf3;&*50?09n+?B>v^~VJe17Acc&%xqANqklq{&@;pT{~50k-c z4QWYt&Ek4Z#)yz}gdLnWnmMsQIxNq^c^0&;LjVgaVT($E(e9c4;rL&85e33sB;lF9>Bt4_jlj9A_-nQQ0JrX+_^h#I0rgodHL&Fp%52M`i5vgeEs@us1;Z!pr=h zg)rDevw?q$qJJw*_?z?c|4I9@{l{6t|DN`htvS#z@c_OaX8uIAZuP_{xf z2jWc#Nr40h(s&5-c7g*DE=qNXau8uQidH|FAaeEm<;v2mCocUPEZE+jFD~T!XU_L) zAluWc0QzT?O zm?6B;&1I+kJ^zYnk}>XVh1W%k^T7&ZrZ8C1($Z0vjRp}sJ4LP@w+Q)yoNMTZY`>3P zPYwK`T)gxwOYeq{gR5KkK$i|UPdx_B3^i}x_U;OSW)IeFD6=$JH!2lfH zA3x^U^0^>V4g+X^f|k=7k?w`ilA$6=P(nT96%ea1i6fpw1t29cBGao>#Vych3*e!e z;wdcNjWg@n1Y-KT8usJ=$?P zf6n3<+9XL*9w#jw(F!Bi@IxT>z%ofChs;QVLs&uoE{JmeeseF!gYbHLwQbOCT@b7y znVmOH32vAox_L-2FwmtmZp$uqkbRLV=Xq*)G=W%|5>~)RjW>X9eqa6bC4Tn@Q29z+x-@vdonkif3?XHL8=2(fXkL2!K2L~7^ z+12I!;+9Yn=MR;0sFe7T@()1kUPpsD^V7G%-7Lzuq1D~n*P?0JtIVG#QQ;A%lS)}y zNxSA{qZST2#cVWC?OkhNG@jNeVP^PUGIrz?xJz4&IB==;IJyR6Ydp+1>Cmy0Q0XQ~ zuQ0PNL8eF+A4^fN&}bFhFNw^VaHaiAovk$HmmExtY}TA*m>WAGb^)9eCQFVx0w*vO zev=YioYQuNR6X=6Eg-(ZVvG-rNLJnJyr`>kGSOcXvGT!>^VNbhWsWk?>&{qUQcE9I z=tK3T$&%+>DP1R%OzR;vNwHR?OcLt_>H{qa=(RCKO$UE6k(5rB8YkgjNQOhSqKiYq zS^X}(#(Ha?Kk>14t}2RLJyCOa(6@9(G3V7{?1*%wf;<@HUTNv*mk)fUA=Jka;>uD{ zu88zz1yf>uhMQ2ir%|}iX)}2x8MC&IyemkOp5+M2m zWfZ)0eM^gV&GdlfrGnF0RGyP|D`*^2kEbFgvAmf#b%B_PsB#~Vz#oK^nEj}|3 zH1M3LW8rg+F{LKqKCk{mtNypLbWRZ`C{CY5D; z@8=Xa1lexERq}FcJPf?cTM~|eZfo65y!{%Mt64wKJMbMbR|UMy@kXc)+$FqO>v{ws zgC0%_xIH#C4E>@Zuf;z@BEPXykiUi7|I^tZC;NYdL@{csa-YXozMC}}BLS!*kI|@A zDHT%1N?Oe)!m6qQpx=-izi5^DZ(@0OEr+y5BG#TVvlC3=GO*ZJh${Pj&!wh9Z6HiB zUkNrA>{s|w!dj`maH#B2St84tIc5(MV!9^moTXc2G%j@8h3!14y8b3XG?NSZUU^Av z%L}twVih?&Ve_G4X`gWi0n@tYa0^St$g|0F@k_+85^1RXLBs*M0h~Ng`M~fkn$un$ zso?NSc`ugMgP|=N%?q+cLj&`VQ@W+C_GkGWiI53IWJC7`^euV^VE^)LWB4f{4>MW| z@m~<}?f2VJKN98{M$0wlqe2mfbsRKt-VR|g_*^R7u+)dXeY77l6U}j!-pva@B%7zx zzdj@?$xQiRD31?5Y}bxiTPu5MA&~vLtaIQW*r8Z=8ziPTw4w)*zif4c9T9ClxL4Tj 
(base85-encoded binary patch data omitted)
z^K-qd?nis}_3Oc7IkyLr1p-^_MYHJ@B-#ZjneeryeGRd_4QW5~RJKK#x}xA$88%wG zD{J<@t;}+zgqGnivNDM-ZN-b?yXa+ldg^@+crj5`*@qU zlOHm%(|a|xj;iGiCNdLN8|}sC!R?Xd7dbN|hjDmVw#+UUj(vN7%Ag<8a$!>`C%zj) z6zyN(yK;H2x6IU5r`=dEHhS>Hu~sTIRGX$hd%gQ<6%&@j<@OwdaeV`-MGrVs$p%VW(_~`BxtEP&H`a)sJ zy=?G9sXL-JB1%35;Vze<1zm5b6yIOJ?o3fM+8-?uG z;P8QQ(vAV~#&v=6j4!6%-c@3z=>sXn(~nxob7I~W^s~+iH>noa@LBWLi0SwPHfT#Q zD}u80C6#66NoaVIc??$?W$No$Wm+z7oB5^9{+1AyWx z>IwN)R>CK1j5?jsbq#CuTl>IL$ktd2CvBT0`x8;^^Ue}E%(~yA@J$Q&@lq^+87dgq z|4A|wtEcZ1smK?dqaM04&3#YC8>`mA+MndikrzOm{7fQ!RpS1t(??eqzGIZU{{0?e z-n)}9Y*?dhdX+DR6j}A)GjSYDhsFBsxC~1^vu;r|v8AZ!5uUH&O{+lUHQDfE@j8J+ zrqDzDU|w#n(=pBlWj5P2Tsn}5_ zoniD#915i*2?@zZ=9yp3scSTM5d=QItn0=0HbhDQ;_4i;3yAw=G${F93LU@7KsTF6 znn?0Sw(~8Z&PScfa8_;=Ve)Qk+g=49S;_>+Gen^vXQ>q@xwKS>mx5_>eA$lY6z|0! z7*=wKd@jttZ+J-$AR;w1to@_S`;kYDAzHp++t8-nsE&lV+5`rJ&nlZI?5f(eVFKox zpRyJLC$;o!11(MWGEM2(z(3EX(;&5e=+^UXE%?Y-1cH(=lkrV8(h1M;KTdZVYkME9 z-7Ldy@iPXh*D_tD!GSxtg4Awye|_ZLV4h%2T6fm}a&0g#3$qB)6-%~mWLUyvJ+C0} z3CdmgJ`*rDZVbMAG)o4&m;XI*XQAUFFEJLAknuJ*us2YO7i9Vr(W4=`w(}=1Z0zA3 zM7qJjJ%vcXJ&w>&^8`T=^(njjD2&wIN%F135M`uIwsfNK*F5pguA zYG22~&pN(v=8m!z{qtvLQtNKvYotP%TBN`giCdKESD9jSK}H@jR*_H82~__Md`L3k zo&~`~yYmsIdaAtrFZO(-fqW$6XJWN_wA{Q5voM@coc+9^j1VFeMu7G2pl-#dn&f5b z5;iKC`|nmwMWA13v(wyVXNcJ6>E<2UKW1qNWULQd%XnE8hR$Nz#z+?PSqfbA%ne%k zmIxANscMYl<{|Flci!aExR~113iEiy0wu!Iz}- zb4TI30s>moNZZ`TmGXJ;DC&(BZuIBaS^>lJ@WJR18jc~#Zog`(q}A8W9-2XBwlNHx zz3)~8Som1q867l;TS`nCqod^<=jpT4qw8r$)fH#s8@R(!Ls)y+?nn7iR&p}%>YW#| z1&ie$bq`lep<$2CzcK!?{tZX|1QWKKCpWgBn?6B6M6f+EL_~~DMu`ErDcO|_4GCFJ zoY33;M9!MW^@h~r`r{af^|_@vIH|Fa_98p{b5K{NoT0x%h^;?6J~18pFJ2mck>Vfg zh^Zv!|1}NDV{OBkrA$U7w~aBap8bn2R= zd)nUSC@7MmFTa<7vxx9OY3gqD0*k_Ae{l9R)jRB0wH58s*Esp;IN9I^yoID{=$8#C zOy>4bv`L&IJjb!N5fz;tn+9{jFB`Rr=2S6e{`#UyE6E18c;tfv-IYW$zVjQguV?lPTGc)|m$;sn2idmOLcg3L)r_7o!ZodKQqg=?zhXRd^rgxZ9zjv8 zr(w}Jrq`O^5lT{q!A%Bd}wIN>W%6ihjj{Xqei>l z+Z3Vn@pHWLkShE?co-g=_q=z=?7t0sKdUOnEH=B1j(k+5D~S@{wNvufAtea{#NiYe z6qJMkS?oOQd<>n|O770?O7`ZaQswNqni2-9xh4bc8?HCUf3ny;V-G=#Ty1mPr=$P4 z%-38u?>wCvZ7`B&@U9>c)+ST9RwWtZ={>W3Fi4!M_^LdEVB^ zr>BC_5C1~PSTU#Gv!_{#YU9~{HNbxyR0rC!j7MObo>)1q%wJj~9=l2WY}`9pPM)8C z#!?O7VynDJWV``un1rN4TLDGCwh_(PVAGas;aLiL* zW{~)(3MKh}UaXy8-&c8J7mmQeao7>0@H!flkhu(&V^kSa5@Q%Px8l-W{msM3BmZ+Y z*P%X0ZH zwD^tK&GfP2TU6(4Zq-%qHI7dKwDcKJoBhVlN zo4vFrJuw9zKQ%QaG4liw(Qz+((hV*{JWy3T+M&<%2F-QsNpsU8lr@T?%#4yBCE}Z3 z-0#eU_jU{wBYTqxugI006o!e>k16<~C=<0c$BNTPZ4`x3d4-ACD^;A1um2L&q8&;Z zy>ZyqC($+Y&}o*WU*HrK;I-CEg&=K=jEq#93TS%}oFtOID7ygbN z?j4^jEjhj#WE8`vF`kh&aq=6$OX`hko{FrT#7x*!kxN+3le?;)5emP^RUA3Q%^_2A zGITkL9Q4*Z={BgLL{EJGZcSjKu}di{rL&&*OKt@JF1O=b(K)m}f9cGG<3I!7KMkhC z{Vdn!77$<18?r^|$wn7nVPQ&iqOn`TqXJE%3WsTfA~01r zi(fv`?!++N_M&UXn5S1%=ZkzD>3ZBPdpuRx8R zU!2;^E&HVV_%=&6SFB_QC1oq?T6Qq|F#ZW>L{tSGd( z0e`pDh=jC6Cpz%FvHd%<{uUCVn{@hCk28R{7jDWYM1JaH*RxGA*(T&CXj37iuQCB< zxD>P;4e6+dhY5S>;gMihnagN0C)u}ui1RY_r_h3*q%$Zh6RJ zpE4~oNSe*Y&D`I$ft{6?S7@D|o=1>{ap>F(4HaeTkIdB#(wACpJMO|!;mu_1Qfm|& zT&-y?7t+vg1HJK$%rZiDeU~I_#%KIVp>wlm4mblcckXg4tM`puZIYfPk6LOz<$DGh zf*eq`JhvSE_?*eV(f-lV5>!lVd_0tbdOuuu{Qn|5y9=u%4h-;Mdqh3dzP?KMwipU{ zeOcIWdr95S*Dq@uSSR#)9!*>->c;9^_q8y@-djj-gn|ND4JA1Lhgv?=*YY^py;2*!Z6bouUJzQjMUeA4afRO6bSB5zQv;0KIA3p> zdF6D!H7{}i`B6HAVs17iJTa@a(QFbg7k3|5E{hr=&{Glj3nr*_Skj!UY#=vQ@pbWP zENf;Rr{!aFx76(zPQfn?opTkEx)4&$>J ziNAnS(umq@X3THdb(_)l6K`x047jX8n6e&>lmZ?#pT)ShVW>Z^yebCxwhm9+jlI9? 
zXiDWuCLG!@OM8+xSw-Kcx7M(_E2zkOswP|$7SZ=aoGTGJ^Nd0^^h&T0>oDCMex)UA zIcD~J^w&={N*LQyD(4#kT!?bUu9_RuEpQFAs~g8gimSIC>a%5HervA4V8YH}o!9gs zTw=%24}53>6|(Kn*b^ZTaO))^26=Crb~yvH`z zKw99z9SbAhHvOFK(QAcVwM{q7EToE=A;EfS#G`{U|D!&R1{l;hPWZn}!Mt%@TQ`V6%k+Ns~a6%HdfzDt5$>5VA-kNXl<{O6KuEjj$Zu^x`)wD2;{D+R*H0eMqJ%Logt;|)~JwSeL#&LBZRUh)S zzGxF`s{0pl(FvtR;cCWnd6!{a*N{r?D61qtfMm%me5x_UU@qa`B4+#&H}$J{<|2O- z-W|rg-ib`s%u>;_yUqns-Eh_X7i`OxsI*~jjv4pZ_8;>V6W#MoSf=_ds=tqXI5i_e zNYvIMJEoZ)3gl5!-*_|l(%yK{Y-lxug}%}-m0j?!lHi6l#ZAA%@|YSv1^E=!9NaV@ zhb`OMQ8j10Mf*0HE#B;Au20Qs+&$^JQ_G5SzcH+uc8>X==r{D%4vgr^o}6j^H0VLN za&P~`MPkE7j7&y@Z$lbhZ;PW(sNA@TP@EBspUG@Pi7FJ|h{F3I%a^;=5+!-}n&-aj za|M|#s9O`t2m}gE8XWZ;q89!bjvSHmO5mch*+a39IrRDN-vG6eJ8X^1(6;q^X6^90&NMh3nwj;WQQy!_ z#Ev1mp$TW|>)Igmv%$FWKvi2g`)S@FGTQH(_@n<;m)T&eUH_N5EZxEzu48byYAQc( zRQTr$=y^<_`R->KbFQxx%H5K0KIAH!gN!z+zSKz5RCv(yPn}P9*(T238}x%Iq;{8P z{-#1p(64F9H7R%z0PS?vZ75%|y1$$a$AaY^c94*^#j3OR*Yftd*s@}Q+dUE%Tp}2! z8R;qiKil%P!0ZBDdi9D93xQM!P$1Yq1FILGY#^#p(jB!u90ebKbAECsQ zAN~-5KB3s3*2}D&Wc@;QeHXt_IOM>FxK*_)ntlO1h;(f8PsuLmJfGzFlkuh%aVv8O zg{XoTJ}}CS^0X=04gBYY_e39mfiU)rdiKQR@aa(8x#7exDI?@^rc7SWO76*=}T zuknyp;9@h!@iBg+*$Fc2+FG9}5{s>%gwMR<%!Bam z*O4K9&t!M;7llqV?d(cHc>P6j0?)cq@ig9C7#@eJfo;x9!JVLhN)5nH&ykZw{4@UP ziEZOU@~@@$2*;&f)UDi2car)>XzX$yIYtv75i=XC;S_mn1Y~rS)j&3M2qUE)nfVKq z&#T4mJ{`xh^-y-G^=#@0qX!>cSijM(&+e1TiY?=!89gJI&WRRf&uAu|(Nl8%S-r zz}6>!5AXzfSCdF|w+w!9$>E8-aW=_uhq!5nKAFMh~Vm7 zwI?8n0ek^{WUz<5wU_u?hH`_gre2A-$36*)eUCAYg+1hN5WHrqkq*}N?zr#c2m(&r zfocXFgZJ1IErLZn-p8Fuwv9!zDn6J+bl2iv{4f2msIIm={3y#FCX19_q&lJEsqg*q z5*9I>EUEW@Ub$TeSc@KvJlOW*%7drE?wImZ%FLEhQcDnrZb03?ibRJPwoUZ4x}K<1 z`Kiz0GJ3nV71C^<<9i}5CaWw46_ijc`;1Ljk9vArgm`>Fz97^@%PLrFmAi!-JxP7m%1mdhr`)#XnY= z=jC+e19Ek$&0_2=%^V`$d4{P>VZ2KvfxB(2B`!%P%g2z9K%$UmOYHnzCu;e4q6o?P}4Z(wi2^sgm!OR3LWY>AjE*s9Bn^f(;!(fnRPR$ z_2IKTdj(etcDVcK(qgH<1{`E}jj5J)G14RYjQI!~_6O}G?P_{*wIKgDE4)=j*3UxJ zLI>c%!68;@m0*(kSCW|(B0uJQ3@NC^K%}~2HP%?;jmWzVwzh0m)4A^jjAO*HUhewd zA!eJ3FPU0Da{8|5T$~%j{IbO*hq1?FatwXuVXg4Tk??Q$Q+HoHHaVC4KI{swygb^n z?k{_(x6cT(PiJ>XBS{6-({cxnm+H1q@N^sCnpXGmH>l{Ial zVWcq*!eDPjn+ffh1M+doH+$~S} zSEXJE{pMPHk$(${o_*^th_b7M=i8QUIm5P}4--ks1w~BEtFN+E*wcCHldpXviD<%p zM^gb%dlDxu?^9Vzv#a@tani+93O_cq&NiOD_VKE4!LbN;x2aa}N;ttuSABK^-xxNu z;dn*|=GcDc{mdh{v3{k-`hy*Kp5 zc)N%kYguJ(|GQ_oXvRp7o+t~h*OV;FLe%Wx;5-?Y$I-47>@dctdPXLZFaYC6bF%Dy+D z>_+rGZn!;xo@uLaQ3}+ zyNqJnmpTaNb36a$;*^#?pe`w{F`{y~yPbIXdEM()J>Y4QCN(XMQ~Gc}32Ap{bnC4~ zxkHW2-aeU2bDuY0P4bFfX^~bs#YQ|dy(by1hqAtWb79{)XdL`%z;NWrcYFykTn=gK z$>-g&@2M!W$dxIu4%bU znym*P5#2q0qBp1ey+&ac5ZEW3^OAST;LVW=c6BRP0Nxw0D-(^^ql&TrNAATl8vc^e zA*;K1%Dvl%0{p@8aPIwL8G6U`#oTJpLVx8twF>SeyJLveiS)Gcf}RBQP!{T*)7J#I z)2Vdx`t2&WZR8HZ!n8|S|3+}(Ce_ZmRrrZ%{@RD|;?rX`HCz|7s$R)C6^_;xC! 
z*=rZFgIxmAj=|$W#z=A2Pm7Nyecl!Puv5zo4DP6j8n2deE@XDjX(6Cp4M(kvrNm0}Zm z+JAgA?nG0~$;*DGwhTS!%-Bp0HM0KLf)*HLSp%_jEk~7+6_Qi z-P`%3!nsmLg_`&DZ!z??a=`fhp?xHJh`>H0z6w#7V7!BDeE`$_$TsdEy}4vPwq`Dv zl){hc#b;Y>O^{lC9^mTv^q3({iIKQW`(o=QanBbPEgJ#Q8RWJoV2$gEdPWT82*d8H z^R3(TL!Yi0UE1AsU!aUX=*XjOb~JpLR*%S|&lCrQ^@HBqzn&#aWm#+F3%V*%JQvcK z7x)C5oqIh>(B$o1R@DSvaQ`M6PQi{oxVPGf+y5l)Dx{Kk&q?j9SVNQk^{OL0sRlg$ zQar{}Wk{MCYmM7K>>84Fl&_Lj#dFf#NId81sP;uPd984{bGiI4k(uYTjy|}lsi~T4 zX?|)>rn0Oi+3Qnerk?(iv$$EJg~SvP$CkA}Ca7e>8WZo8gK{s3WPfV)3=gDv5Scpn z@|BRC&fb47c_u?q^TRD!0GoF_vu>1j$t#vID|Occy>l^NPjd$CMoY*Px$oGNOGt1Y ztn85Ty2(Jx#YXFX3F^mQSJuBGChmE6l32gLhBozD+aNzzV?9C&IpMA}X&$DCJ(k+G z%fE=7Hf*478Gn^5cT*`8(gV!&5MLEDS*0MAyQ-D$f1f1IVs>qn{qE8)Q|-(t4RNl3G5JIsmBmf( z#f(Cq^traRFg;992s(LWp+}>{Oows8SZw6_NxB-x>s!hg$CE(6VSQ%~&5j$sd#vyt z@>7kwDJl{B81!o)@l&lPe5QFX_Vvj<*cCgu(m2aRe#81a;(hvNCYPkP!U3}?>p|D2 zU?<7TE!?dhh^k(}9lkXG#|Sz5VP-YOD8G5}rF#|Ry1jg{&C^Qq^N?glKff$hTN^X& z+YP^%7k(F|;xUE;l+)51f?v6}zemxhcB)HA!o}@-qjQo*&abE*8Q~cO z{r1;MN~onBx9kAZP9cmK^PjQlvTz8jTe_ncqG9=Q-s$occlyDQXSUtqX^~g&Mlvkf z1Kw|ZukQlmnnRZ}VKcO6)Rh}{n9}1HxTukQYqC6xkj9Q?l|T0MF5N4`H0=_$NX;vT z!e*yt?jyvXSt~iW@^pRyR}E>h0!x^(K-u6QF&q?b*E+Gt%f>+8l}7#3GJ=FnM3>5?NC;- zDXgAjb@WoN`E1UR!4pZpGMQ72COe!FUa+rnyU6wl1iLLD#X$!}LtHpXiEN0mp#8P4 zMfwY?F7P!PI;hIdh(!8X(ma* z3L*+!fltvJvGk{Qc%MO?^3hvgY2_NKI+-`jGj=P{UlfBtz0>5a6}joT<3JH=calu8 z^t_Up@3nHicE;n?Ug+aeJb2zwmz3%PXzR$sz^#36vy;;*s5A6Q2zU$~P2E?+ zi}SZ(j2&GlxK;fJp_oTS1m>HeDPN77&jTUEh)`VxES7wG49aV}N| z>tEVa(&sjHkoT8A{6naDGxz$UuA9G|U>r_^G8?&TI8q-jQKbGUOBTU<1f9HRgC6d8 zZ=&B_R^vrhm^Tgih|ZyyY?ughphE3V_bxktF#sgC=f)j$OiXHKK{0RKE$cW7Q3|g$ zyfylEP-_F>_93{K>W4Lak7yMwY&pc><6uR!X@PV<2|E|uwtpwqRy58fzcPk19m>Dp%#)T3PI0vnJv25lw-y@nc zf3@-La7(mp?54tp`Y_0u5P9XS^jb{+w>E20E8$B@+e0`{TI4DfvK)JFXB8dTL*8>j zXm4vLb8hv=FU1I3sc7(_Dz6Q1?`5<0y*v8#J+6oF*W#>u<;nHlL%^!srxt3ld(7E! 
zwYr#xu&=~gntMfj(A5j(7(MigF|#`5#Yf7mPFFwLwS1uKj}HdnVo{4~`I+(wVY?9i zE`Ol;ahX26usLJDuu`N0R6C}X_oL|bM)W;(b^&);Q+4`Q|J{=AJl@HKXgkVD(!ix5 zs%rJ8%q->o73wNe2>*!N`0bkUjx#n=|ChF5mTb2m_E{=V(9Q2}-4{=lZ)jWHNPZ>o zjV3!8a9eVIvPaj1qBm>&go|r>zQT5(4a5h5FlzG-^F&5z0kab{KOYE}nVLeO>$hYf zXczv7I$7Tw_tjV5L<1VlX4A!o#4|ea9Au(x!)Qu7BTLuX-?fai{m$Q7yxnYLCj|Gh zajCSoY%TX+Y_}dVy6xDDa_D+0ywRH9NSYG2O!7O=ulYX(dPSgbz6;X1jQ_k=_pz-q zA;Cv{w-+v-qpj8rN{b|w1l9Sxj*6l^7$nkU?FJRhIgl) z{kTW`!b8zdp;_Z!_?@KvDl6z5y?~sTt~PbW63J&=CGi$x`S(UoLEh+ZS`}y`%k(*Q zXFL3vgk&~g;E%HVg!x5W*|z&N+6~+|53O_UoWhk@oR8_8ViJ4?hFjGgq%NxG z{P6OBZXV6wz8V4wYhw71tZrTeBz>2pZEi)!M19Z&OMdf|{k{h5B1QeaS8CTGmDCtL z$t7XDgC8U*;LjH1&z=HXz*v{1WRssSNU7xBhn6ma&07~C-)0lfVJdpLT*7DJlPbig z^~7n#eI$sWOK!zEHhly0&-BB$PFo_G&$v}E&a0|s)UBHidbu_+yw1@6zWd_3jb3!$ z)eI7hL~$%Hi>i&vvI%Dw&>M3zvT{5TMVvK5x>KvwEj43J&3|(Ud&SlA&9Ur(cU48{ zApP;}#_H^{v-|s7^xHnkKmk5GTa4&V{F%nsJXg5%xLs%1TvMzl$Ef)aXcN5%i=~%w zmm}rIus)wt#@`>IidBV5CZx}PtiwqV7lsELw@vR%8>QW{3{(h9B4&SX{a_b=|27aQ zQFg_+O>Lxh8s7riWu>mORHIgLqX~ddw2X7hE5L&cTJ@OuHb42uru4>1nQnSYppNkqF62F8I0w43q}b)Y2&U|?3jfFHr$EJLz&vn zj%$VgwDrN&7E}f{?fgx^uJ1~jnTt+_+ymoz?79@l+`!YX40#zB`HD3@*4OQhN9;u2vAq2(Kk=x#F>q8v~3Cux7LUY<*I z5g!R`XY^K_+_Nq;e@O_JD+O5mi7p>VDG;B>#b0%TEqfC353nsm9|Ht?qnl=z*VR>m zi7>O~hp*J?oarTFD}FAMi|k7GB>m}N2Z)!lho++i!?E`qAZu7*U1ac$H}zo&Mt9!(It!V9L_Bet+M$o ze^YQbt(0N9p)0G7Ny#7a^IghcwaV8B@~bg>(-w&iP<*$8%Bh@wZtln-f$_t4s&)A8 zZM072+w{J0-y2%0@wPP0gzhsCVvO|s!R@MnCC}j?L?ruu(W`lTv77TuYV@Lyb$O+a z1$@b&JbXZ+EVTPPWJcmYtHBR}yc+KR-u|y0|6i-Y55CA(VS5H9#Js6L3iKQq<>62T z`Nls+lw-cXGuX{USLffnT3^jsE_hiP-C$WQtXUbYiY?ZUS6k>ov;~{sf=AO$L*g+2 z1DflcGsNVKz(k-9AOi5^ljQFJn9wxROs|tzl1ToxNT1U{Wb_ohrwUR83Yk8I?}>xZ zfSYE#qt_&0Va-hw-rj3e@EX9buCuQ`XU_$+1k^L}2-!0M`2f5$?`u0puc5%Upe)V= zmCecsK-j54nTbcFiI@C8m{Bg!XG32Mrs;{eVt zE9ico4lvFu-gs3&Ps8i5W8$6$2n*0t`#OGYD6d2E4;u8KiCW(YcJ$xN0r0mnL0a1? 
z)^!eCbAeqyPj0z_{n6)$2HpUy0nd#*;`UHM^43*L^*SStoP^72L>-AH)ESqYfC^x7 zgPd_&>K=tqS7uRSQJ@JvSP8@l2+|w_p8qb5fFtM1Njg~ZX znp%*$HB|#%V5|fqjX6ymt&De|*MzDLsq87FWMIu!o zRVGz5RT5RO!yk`uG1L|$_uTIoQ?R~Nmf!QV= zp?emfECAY#yzt;P85kPy(6ByoO~pMr0pPA%@2!tD5zKgx0Yquy)&YKL_BslJy+IxT zC(ZrZ=-m=yWUx6%8DOk=-KKF3bktm}{a5?5?pH&j=^vv%1{On!lZj)A{fXm=!?TH` zWXbY8zvUmlfxm$Cfxf0s5qmt~IKT)nw*Ix(F?i1lbOG!(c?#XL0@VP0O?bzzsla&F zIhxikX#fI16>wH_7N}KUSlg>*f5tA<(y2EH^j-6Sr9f*yTN96%Jq%Tj zpcp%7uncGqzyJi*=?t?a$U88Ae}d?&nSTSRYpo5B`c%eL#%Gg<24+L{5J6x~TSGdd zqw(2_dxNtvIY@Q|4mK(Ui^i!k zb`KeB0a69{)xJ(%6HOsZ)e8(Uj{%eY=7SG*iZ2Y?I;iEi`lvyK!0MVWcOr{*l_qVmds-kX z;9{-L#Ao>9$oheNInCF#ikM|dL7RXspp2$xeNyeM!I2);I`nKzQ7Tn7RV)>VDzu1K zw&CloNfqV<<`Cu|%s$LXO!c7bYBe#zfn1ti=54b}EFM%}n33t-|6o>Wwv8__9?`x1 zH@!#&kg_59`%o{@ZfE5W!?pIfY)`c{iya5#4AaKhB1|a2FrX{IE^xU1XI-Pop99me z|Jhzl%%)RCt$Z^Vabk{Ef!+r2`9UNp~*+fw#$K_~$3hV{Yw*6ckg5G$bI>0P=m z`WHbT+y8DI96w+2AWgWsq&nnZ1iguUvys}YSyVBfx#sk;h_Y-^8AY(NnB0v~R0;nH zmY=vGAp#7BMNsad#5~XnL8>fZ;oggFczYOcv~K9FK)c>0mZ_Rv!%Hf6tWL=Lprt;m zZM928cY;%BFHuH!MCwjrYv`sitaZw`hV>yw8ZhxXWlMwgueQj&vGnQ1c!t(H^?IGj z`alz2FdGOA1l0o!+mcLJ!4#kn01|M&Vca+k%ob+C1*QTt08Q)14bmprVu~1|!uYF@ zWX(RKS`t94$ybYA-)Kg+&15}-*^fDnIfyxm*}F4EnfQAsZs_lyP!h>FBAAN+w?4>A z#Y8@DW~4HRKgF6DdhE$WG$H;9IcbVWSNseuxv_7&2nd_x8~98HJFQ; z@j-vp!6K(9{bLq5S|rQO{AONe^W$je`g46fXUM1toIb} z5q6N3S|T}4$Y$5y*>+=_?gsJY{$*c~*KSInO8xXYc*1I{?$=Z2$w3(gPxY(-6_C1P z{|P>MoH0vR$n56S(Z}`rl)Ry-f42c=fnB>SUC^pIQcl~9rX+cEysn|ZNJghYumOH@ zk%=x#Q^@LO{F4(yj%9qjC4x%Uss4l*XNiM&yPa0i24K7&*Kt^>i1zv=GZHC+Zx zilU`pp7+lFm%Ux>I0n+S_Hu8z6Xm@E?l^kV6;Jo4@QDF1gSJKzn)rKORajDr9f^ui ztBdAS?u5x6wAKUjMcl1#)l1UD#(GJo+sW*;UWP0f9z-vwos#r0y-H<7T?F%*m0T@9=h z`hsuK1PK-(xLa@t?w;TpEV#QfxVr~U2rh$LaCdhdAh-?gFz6tITz>afy?U?e|KYyc z``fNMyH0g?oj%>Y))GCOq;&W?otB&aP1osHtQ{3FcfmnE1YX1+8@?XCG5@rOH5eYQ z(LFetzQ*10Cp~+sgerSF%*V< zZOmULnxE1jd;PB>Sx!r>&@Sl1?KQS)^623m8_Gf>*=m-%6@ zn$(!AhugUN>o`i(qF*3j&wWwV&pGG#z88;YW!-a_Knq%C$l6byK!ne!IHH!$;WO1C zKtx33%DF!=2x%=iptLv_e&Uhp1sry?UX82?&~O1?WytqK-p)xv>n9ro>ejr8IO%>b z-itjQyfzRsy{_Wj2R@c8r_J2Fz88PdEBYUy_MHDWP#38R)s?u=K9bY)DfhOam-laf7lF8Ka@wqxls;C-NnY?8|Q1)gm)EFWJV*@ zu5`1=1+YoO3H8a@$aqz#xuku08Y?~}UHm|>7!X9a=sv!k)|vyYp_oW!ne~6H7ZbMH z(;^ z=FubEfH=>%iuzcUWGwFwN52o1)1L6$;&uM2g?T_FMRtKCuRoX_A=daUQs^>v3)bcBC&eW-xgkYO4a-}VyFy^+XFd5O_w@cYuPQD6zY*yF)lWREuwQM3h+Yi}YcxOhg4lO2})AC1KIe`6$0c6MH_|A!BOhnJK0|6{b%P9#6P z!PPtevx)PpIxA&tHHP<7a^fgq8$x+BH`ub2d30YHP`bo_kw?${l}JY`Q0k8km9npnIQfoe}MtB$S{OIPxw}VRbch;b$=DB z8N0I-rJ$4$|Ev5 zv^VK>QnzBwAk{}{4<|L9{S%^HZ%RDeHRC_cVx1-s_!_XF zS_X)myw$gL+Kb{FGXN5L2klGtl;pzAV)!f?**c95IL|Jq)aKMUbADgqXOZ2y5orCz znPscaEB~w>Mw!jM9>~+=5i1Kerg&+N()O*BgvcEEuw=Nw)18Q^6o)ER(ms8(2zjJ( zK*|Vpn`Xf+UXt$oND*usNI1%!el!{f26sr?67)>XcXt5+-|lPyc25yqS8}BLje=^C63bEj&qflJeN;jHcE4T z_>UL%BUBh)spjr$200=Jm4U^;zp=oWt|UV@#KVGkC7G{% zKf)9i)KZ}EZ~dy8)o~ZrS{R#xzrLqBrZg!j&Fo*KdQNA$ zzAMfAl=K(W^1k1BYCh?iVp$@aX2c3P5aqz%%JAe_1qQVp1p1Q-ukt2dXDLQtZZATi zXp5m23mEfdnI0j!cJ%h6QqwpCA+?m~AK_vwNCVzonw3REv)4O5y)dlk%;@e(@aTBc zcUv`zm7%wbS4#OD>TAilxTX6~&1fG7kh#BLFTgDlM1?zP)vz%!#GMr^pu&WVuTghZ zmkxT^!UJ}k!#4)L&j!?v*Opkr2_XaXNt;sto*4Z@@E_~$k19l|V&(qkcLDh-FdPb` zN*+0J<08b6#zUh3LGpBl9NSKO?zv(7Mp83h(aLC|wWz5P1O$K6U68gVE0+YE`ul`* z%A9DEtQ@xRO)4B}oa)eUdaL*@+ioy_H0FhjcIPkkKgqs-H^xuN^bpT_>DD@*57hifGXN9YuXgxyDka&v$Tg`ZUY zS^d=QskF}2Eop)sWqP#Tk9l&92@gdAY7QtV9Y%YJ78R9mO?$&_^l2sAS;+vcliG3| zYruT`zgLw-ot3}gO{p6Qt}W&So#qv4iz^J6FeqG_ENnnP-cutC=DR{U)>EbQL83Gt zD6wNezRT15g5z=S^GxaxG%5QXe2;aV)9@$XNrYBohGKvDU-YsBPwWx&PNlmGVOTuO zKI@}R_r>B2MUWwAWBVqxj4E^Yo6{dNJ2$7(DsdrG7JHGkbRUPMUxxEWH+yqB6ybKR 
zKNq49j=!jFQYdo8Au_BIUErP8qC;<8?U6Y(uOXLb#Jp?3dXA!pFV&q*uzuOz(|BPh! zdunb`wnYC$82!N07^wSU^@;NF2@BS+5#Bc1s#7gbeXl*og7A2}gAJt|+Ck!Y6_GW& zuXjTk2wiwme+;qs7n+MnA)RB)S{fSQSAF`g0z2-T7{vZd{`W($SWs)v2WF?ZF2w6z z3#V#D_*KaJrazX!M4O^DYHVmiHD4A)ENpnRC&rg?maL$}FWlK%sXTlMg$1WvK0lT6 zdV+D>llU*D0hkJqId#PvNkY7>vmOlc`%&bvK%mN4K)Uh+|yZi8Gotma!qG|tW zr^L2)a`g%Ii$&m5ZzM!5qF!#ubPz?|30*y+`+aoz77dhv-KI2F?pHV(fk<3Cq z?9;%z{yFgs^6sDSn_;Mp!;q_a5w;VSrjMmR!k9}{naa7bDn;wFqs$?+erKkMyC&mw z>e*@-?#sWDkFu8iQ2pa)b-|S%h5d?q>06=Mjv~%uaiAsG<5S7B6}7#dk<{}t7V$tW z>!#RJ&M^^Tf$|E)VVpGvXxM)m`MDfxh)--tS`7Wew{<`c5+JDl0*p``tb6eq>}$-f zC$00Oc~cX70dL^ft4ZYKYSAh1ne9W3<*hoA3e}IUxW9fs;)Z8jSEO#@gmp)nW-4oF@oVB5 zkp1`EC&t-oKqe)AAG!*(a;s~08YET~hI{x6=YR1=?=8zL^TIyCMnwB~R{)fT{IkTT zt`nU(!H^O0w88k*)ZbMNO^X|Y3#RMt!N)a*(8UUz{gkOgt}>|AmpHd2R?&q;(aE+v zu}Ue5)~^p01aaos(<#{|VnuztiUq5m00Jip)Y^Q+peBN>{ofwJ9E7oOMH0H>U8Xl5 zBCUV*BOhCy!X*UpS(kEcz>IPhBoOMFzm)99T@GiF*%AcULpCn_tmFh+32%Yga;xR=RJOwx(eDv*$m}H zf-gDRYeZk?Pc56XIOH=aghE3DqZLpno-rRk5h{#43;nY5BQtOJ+!kP!B6xBW-bpgT zET9Z-O>2?62GQ)yfa-Y)Mel+@P(Zgu)T=A1;#aXe`XnEP+AjN&9&)t7Xc!`(>2ARw_EOLndT|w&cLwVMEK9`zc z&QVk64c_iiauJE~o0XVY1V}977C5s2dWox<_8W#=$n^Z;4E@OB8!g@q$PLVu8+XoR z`b+p4%TR0?uq!)wEKRQd^X(ApEOaPLZKR%y&F)(%73-`@$m{e&6~T-*&b~F3#WWTL zhH;oo2!2M5fmA961YwQ9EqMDdK6F6DR2-E7(WAS?74&Gmg_Isb?Km7wRV``gU# z&y33ST+vdv6`W^p!#-fH3l;gh$XX9ZgD;941a`fxxWmkNStfVJ2JHHF{lFRu_=GE4 zL`ZOTYxO40-LlaY&-7nJu-Vl32Zwv&rS7SXs!7%J^zc2TVR0Hcrw!!YX80X9>>Foj z7(U~cT|<>IhH9?_RYs&W-M5Ex1ejA*e9OMf=6Tq?qe^&nt6xTSt7+cHRFw!CGTw}; z8Au)Xh!BzDfH#{hz*e*ZUOBIGO%_c4CFR1%+VCwoX{b_=^iuB1W-5cuhS>#m{^JP} zbw;1Y&h_p27UuA2_=wC|pKBA0(SQ<*I?Ko8^EIo2P;%%5;=3w4Qpj{)`Unt6qu^pUVWMb)l|F$D>nUwxY5M`u%<=Kj8B%uHKO- zHP$fuOP^s5!fF@hg3x+xJR{3ur8eN^_X*L2QvWXDX>6lQ!X3MPVSh^#eOa_Gy+zx& zYB7Uyx-`<5n@G&9Dr1ESJDV}t`~=Y|paGZytQGNS{{q) zYeh1{c!6_0`Vsw&q{SFg91dO9?KAX*hG)07V+^^rMrOWiZ)hZ~k({O?!zaI*BvjRA9d3$55_|u>)zG+*%TkvJIB4fCdg9*BNArc$-iGg&k zJL3XW?5o?Xji9Pbde)d1ozVBK(4>cm_-0l`b!)v%Qr}&{Ms}sG4XYXNUq}tDT{B0S z^A_o7|BSkGq5}v=KkRg^iL)$YGqFV!*w=kOg5u6~2CIWLZYWPv*Ta1~ZM)~2YI%7d zBz^Es;3(?PTvw_uw5Zj)b5h3K8CM`*RadoYBJmnK>$2%0-1;;!N2Q4z0=Hh;vKC_+ zzag*5;MItVIqyZo`R7_gmdQh!THh_$(xbKo2~Oz>KRMJizgT zpm%>J zbKw~s?DtxxqXdhSy-2*qj8Hx#0C|OaQelb4WfZExp zw`ne*CfVPs;6RcJ_Rrgcz^5vLlZT;!>tPbAvzR#|1aC$j|I?#}XV{dC^^U%}-oP1% zxl690x-#urmG7F@F(o?hKli&ki08<5jwl%*PZa}PX!kVl4lE9*H!o*-& z)y<@-hnI|K^(N|Cb_`GqUlib0LlCUjK!KW8;ks2X88AYg2c%!22+~&3JqO{r+zRA( zqEtL8bQ+q`-AX}q*%7|5K}l=uFRg;7uDkoJ zg0yn(k|hkzB@K56(j}-ZeDlaTPJi?st9T!e`3SiuEh<58frOqOc`Ujcn-8n#_v7`= zBo_eS>r?rGC>rBbC+5zQW(75V&vQt96NA1PZY9qRIh8IhRRUrS^j-^hFz16Xu2XY6 zaYUn;s^O!SHkU76njZY%l;@=PYq`ZHq?cFK%$M`m&Yw-ph#}aAH07&ZAHr*}h3(<# z^SJ+_xk`9jC_?}qRD)xv1f5$WSmS@Rjw>fLbq~iOoa())xsj^D(5UTA!%E~1B zLS}A#{Zvl0i${JdAy}ED{j6TwHD}+3do-lOnrR99LgxR6_mr2(w7L_TaC!hw_3yq6}bNfGW1p^Td|lFivZMhzD-(TrY-J~fYfXQQ{gZ{1(5qalRp?l2lq%wVP5j?(C2 z|6ZSK>3!$TFI+5@)ftDCL=-m56@&YK)um{)iiZ1d@^ zFPn{joBRZ+t#gbg*n} zTZrD+{V$-tTgp#tdoVwzGmal$2;G-5RllYgKZg8p^@ROyenD&&!SL$)^7&u-|ou+_DXSR@<=Kx|vf8d-{ z$G2>VsWMmgh_YJlh|0R3QwP^&?Qog&-=7Z6@@}#9k=q0Z2%$aO0}c(7o6=kpRv=^k zUxtUq>Xhpqp``wFrx}lF>fir;ZG=$#p1HL8@_?|mk zS!4EqH2=|FSZo=VZy?@T6FdToZ2n~(EL}Z#5I8wRS|KmtfebR9hjA^%q=!Zl zG}t7|+YL`uYpQM=E8|Ts$nGEwL?+_f96f_+=zC#_a#=w$w167KO@*8P2B$;w<3GhxH2&D z*tkn^utQIZExfVb2dv1@m>>WlR~+_YIhdJ;DnA;wQ1sp&N{S>uo6(O7@48-3`$h*8 z)%>n!%(f!17AApK>BE0133x(Vvg$T@=?=Smq#ia@MmaN4LV%AJQP6o$_ZoS?5|}C7 zz+n2BP-UVUPBFzXU`Vv`V7b2+p~d0Q!5C#tIkf)K0v78$LhjgMs?1(@=1}uK-FdnT z?@Pq19BClj)2EdJ!O#m*_7`dAL62kFrd2C_u2WL>4Lk>c1Ysylvxe}6KHV{}KYpdv zSeS(IS(}-nksx^R0x)<<--DSelIGY@143QzB2(zElB2vZ`ql}5qm27R 
zb4Jn9ZiHSx{_g%Pn9nQqLU$J?o>&e+dG5}?5XpbVdrHk&e^!XSd`&NhVe;*-h`deW z4r#p--EPc?nM8(OFNQBlX#?0_#5(B>8V;!LXm-YI)o zPKRG0^4K%&>bFj(O3IJ`p8|#|$iDFlT(^?iCeGlB8IeS1P0Y5I{+h z@PKdO0<0WR*PHpkU7%cB{QASsLToQuN@tg=fnjcoeMLM);oK(ht68?l)!iw@gP^U! zJqFq7-xMjxcwHHmjp?`UWY5tEVge&N3{8GP+J%<&)9!Ow$tw0yoOFqmj*~|NC=`y z&{cwTPX_gV=;-%0(mn5`2=d4e^1H*h^+vJ~-0di!zpe^@6qB`k5Y=TKTlSTbNs>5U zUWu4rQwAcg15@;6jE7xl`H4GizT(H>jZy>}q)+hC-Q zXT~&GAU1J8ZQXU5-zjyU^?aRqOJ3Sf__st^x;08RK%zor`QO)oKxTLokv!jCTU39! z32&=SGZme?KQt(kV|VZOGP0~@xBD6eSI(X19=u6~Ky<&0US(rYyT=hp3}gh=&tXJA zlMauikMgOC+ToI{^>aUp{LUDGO?Reao79UHfl8o0qLXgGR>uNcPE0JBf?jPiL4Sve zqUu^`g1b!v!HOEBD0|L#D}WZpm-d^D0d*Y;V8yIlK$^z)7TX~|DWYiZ=7^Z)42Y^z zE#~lXATCa=z3XuV%FBI$~!0lbpzR7XGl@OKvyd zb0WKQHAd~-sArxtWVb~v$$(}>&65b+YSgUKRExPt1TDTKx~cG2GtUK2)^{gCysnwu zs!s?8eo7he!4Jq}#&1^(oXqa{h>fW%woobsL}J=aF`V(}zf#>q`2|k314S3W1$d_- zx|G2}6oQ5kuk+}@wCN~+LN14+`Z$geF?~lw4r&% zuxp6!p#E`%ycVRCSMOJ|fOuIFq_=w~EO9PQo@7kM3BMNEI3idfVH*mY2I&Qh23nmw zXLsTehmyWb0lx|@COcHIbhR@fzH2~enP)3NXOjt<>Oq(c z_B$|qWMg_08`t`Vdl+BC=G;LXgpZI13I%_|bRy`B2)^C4tU5w`UdI0ZKX@p%s2ie_?pbO>4-M%h&a-2jE)=@zoUzn zE3Ea&W6z)R#2qeolGRMJ#Sh!Un`h4R7ibU7DJMcV53)M(@ir4hmKugoCDYrXB81CH zC%N;f9xrv6sY)1UyWmqoxEJlf(WmgO)cxFM=~mh79=xciYCPvUNi+`e`sZ1$(VuVcsx7Tokg6boPGH&xq~D;BD+y=tVgn zR2-k?IUK703Eh^&|BPB%L9Z)tSRzU5kN<$_{bscOHy?yYoKCrG*-t*iI^2mRik>-*^^ zx9B?GZnq7i$LkR`3p%Leeu9*YQMOQ=lj$@BjnbLw z$GXP0k_(tp%R2I|7RF?lDaxPR=UaKC7t=J$Z;D+qw_oPauu<(-Jrs#!LK=W^rrv&> zIim#mqT>$*Pc2=9w|S^YOt`tCC-AroILIFaKlnr(bMdpY)o@+dZc1+c>1pi&d_zqU z_qRnlLCpN1mn#|?vJ?6z)DvkD$rI@=;uuH0LUeuGXP70by}eWvt5kXUK05Ewd>GU9 z7^fo1;HX_5%5&?Te`Qi4s%~e#m=^n(w1NACbJwa)G1)~~PQzr3UrlR(*dFtx`3VC4 z^>}T<1igO7dh*S7ytuaTh8W6tU)3So3Qh`l{@12EaFiW~eVA$7t1lQeZFpJsL zJ!MQ*#cPR3wl=?+hZyL{odW$GqY;hY3;oQ2QNZ1W=bazlYdgT=FkJ>zA>jI&u3>d9 z@W-7F{YBSmtn?-)j%3A#K`k;HrCwb6z~qegA@!3=N&6uDy`N;q`QJlBCQNVgt6eN_ zP@3ID=zeAwXO~xUOm!Qe1)>D@OmTdu^wFedo+pkU@0e~~x}Gn&(INa-sjNYKvQy5} zywUik@H020{o-8(HynWPfe4q1XahWO}r=9}3VgL)7S4j8%EuQ;V zdp-)lUivFV(Tvv~&=tjl@{`pIn%HpQ-oXRnM(W+n>WRY*?nay#GM5P4wGYu+fkr9Q zeD2CztgZ^Ptuo_W_@f&BZNWG0Sm{}s3`H($y2-ukd#?ayqd_ZME1u$To2@q$==_w@ z+qp*@_(s(gYlkdesm#BV3n|u~)}FSWc6JR`jqocmE9EP8E2^S7343Vwoecg2F9?B& zVnch`ftaqBk&Qckg&QbEMxZY*rNA+^E3pPwAGWR#km>vbWJS*}$xx!YYH??MwC8ZE z?{mL|C~Cp%k6IMNj3Oz!_bjD@HYh8U4mBy2=rc^#Tnz#%`VK&ka$Y0l1;>9Lw(Sk;E97Utntlk0 z8%`<}?^AW}7s#@}c{nge&*U@c#uV1+eo6i}rrx3X6T4yThg`XMa^qjzxot*Og#ue$ zw!c-kLv5_twzkh@DZD|)b^>!MB=T_$CUcVGMMEo2wtwSt_g5z8)JO{AiWXM9YzveM z%^lzx|Iw-IIbavr{ww{%`$y3FU!z`WJ#Wlx1LB;5o@{5M)SQf-;&Y<~w@Q1=2kUX0 z8V^hhJ7(*srf8RjWa%UIrqoYt-{e8a zdnSB_o`K2yqIss|(5P+WXX?kKNtE-`H%@)%U|VztZAW2;*}2=Rhu^`^!OtuUt2_=6 z2N@vV@!dh%sqm7mz{4aFBNqGVJ_LFcc?{SgsNLYF=OyeWJ{Y7PLcGlOn!8sLwtT9p zUNGWfB@QG^9oQMlF)eUjZu!jnb8_3a;Hy2BSQOHr&Sk;9+>`xM$N-Kwj-oxD{^7Qq zNtAB`Exd<5To7{35)}|HWr^|0G!+@AU8=?FyJVC|ev+4=WOV0e z`3}nbLD5>HfvDN-AO>nf+&40BEM~*c3S5l(*Xj`-?F?*&_;>@~R8>1Z!)I=_ye>2DODq{by3`uus(){4YuRZ^V!0cTZ%4%)DH{eQPq+P&bkfQ;HZV|SXq2u@`L3w;}XD}8%OOG#TvYe~CRi&Y!I z8RP=;06Bx)L9XCqAjELf@K9%8=e%{?wfTS?gbI=eF@ZW4Di{1f@W6Q>4-koix9V2- zROdhkvb^an2{ULv^-j_O4htvv^7`<4^ZGV<0Wa)tg^!l^y~nN_=t1?_$)?HLHkzRv z_!qmrz{}GM;gjW~<-O%I@2UFX>kg~x14MuofB>Kc-~)I9FadAW#m%YB&3x;bTW33G zCzCgp-Y{>NFOnCM50W>MotPa#gTjRI)(g>>3|h{AVp+*%mlgJP>X)ubO5l9K&e@`K7b#G7Acs+TMcMa z-k3=<=4DCVbR~;_OK!Y>*842$TkYTLjo{a=E}hw384W66b)gvuc z+9T?Xotu<*W_IdzV)`z6FZz0VHG3Ofh;FYRtsk!MtZ(`+z)C$+@0=m73pfiNcIC+c z5gQRRhk6ffWgU>NvYt?-uwCk%97KIX7;?QazrnIW-;vqj+iBO4;O*&~0~I~=A8Ah! 
z8EvY)Te5`m`u-H@_o%U2cC}{{>h-AggQ5yekeemc$JyifhB#O#*OuszuTFW?t=nVz z0y}%4_3)=sP)?x{L$m67iBn{uu?=HDo!KeBZ%k+Z#-6C-B|G_4f!9a2@$pM-L*Vcw zf?>MW*3S;X>R!96l@3-w56P8u2MS;;^K!of5iqKEX}%GWx^Lm7N8YPt%L=cS7co*xYP5DEyjn3%nJygh?#Ooz&cRxeq+{2-^COKrd}iH4MQ% z{f{F){~nhFE&WZ&nnfEo>O|`|n!f9>Merco5iamol-<_3PdC4H{qCY5cHn|8z!JXU z*j*YV7UB$U?uzSz_+|QmVZ5+_3x~_>Tahi;O!NHRABY?n)aB4+&^Y1Gj|mbOz?LwEtnOY)3x3;-gV%o<>%{%*mdCV>)!-HjPZkg znQR7|cERJ{by^>FrGDwh?n-4zh2-u*JKr*?YF$IHUCNQ~`qcx5P70)E?~bgt&rhb( z2~46H22mciL#1 zDqGXG^0u@sd2B8No&lDs4AYeIl(e(IEc-3TQLR;NroeJ?r5VHgvXyae17E4Nb?&R;>Jw zVzK0e&cC=zr&|h~51P%HwmQ+f6?pvfsPCUCsvIcatr4i3Q+;rRc60V9E@EU< zC{_S2hD-w%bLI(7gl=gcS^dkp^tvSizjv#H3)ueUET*3L-nQQm+_c~NKcYSdb*Tla z8WnXHg3G%rz$FxOkVT^zifNJ*^8Bg22cJ;-`UI;bCWK_8xnsBk;sw6xIukkfU!35GOqZD4+W*A02-!U`;+_p*L1EG0xP7D=XqHBUC13K#2X?70cO(C%P;HxyDHkpQmAEcV-Yqh|C74BgadOuOy9Y!e*ak|A%X_z$Uf_z%^oh?edzvZ4-B98q0*GB{SFhsRITt*TgqV zHm+4z*InUV*qsOjsf(wpu}jF`6Q-HzIx)I$@0uvE=sN|AxvRTVhTwrw!6IGB`v&_i zu0LCs^etI=#?vh(IW29#on0DTunCrZ%WKtZtn2r#jjf}86N;SWne!8pT)lTekN~j3 ze!#VkYffvwu<=l)v!z6R@bXCWTJr?4SzP0g&Q2xDf}~v{&vJ&TQe&~sze=J7#gnXC z<{azw5WJfoqU#wBlUvVTfuaK$<4Bjiex*DK;6ccKv-ZKQ<%0OeKiz@A`hbJf`{iU(~~*|X>dhpg+BzHZfy$D-@A&pv77RWgS~H?k)lcD0QIy0+^k>ozR) zDSDndo_cqO!w=GS;f=Gp7VCEFR_g!}meXaa3|){s5fAa+Q&VMr8IgLn9>|j6 z!hL*#QJlRZl`FX{{4A=KK9v=*`M3~3S}sPg5NKHO9HRX^+6XqTc-dV6d|2>zt+~of zXauuXz-o>q0)6LnsyF%pTg(ke_dYM|;M{C8)=rj%oFgAC5e-PL=dSWe=Z#~_ROd38 zkLOtnGzA!OyXVj$1j+ZD^^KbM5$dF;7~QKEi`S?iIZ?0pmjz^n@jkW!E3Aye1!RG7zjkA?I6)q^{2CZFWMd*tGk&_PLr;y_>J+{`V z3Lfk7|3CK&lIJ>ufcr)6f5KwpV-vEFb>>Q|g!A<_kE69Rv&gw5=-En>oa6dmiTCT8hh5(D$dN%Q)w8h-3QzH(e|MoA(7zO&Te2OiK8xrDT#$EbKv;SxT zty2$Nu2AC>EsBLgR_#c&eGbl@h!eASNmAIqSbeo52Yt&g61F&+UA_LCc=eaI0aVU= zUCvqR;%FEb24(b?Lm8=1sLsc+_Lkne2-}=R3+toLdfr<3m8B9@N*WMcsIFOCn(ovw zt;9egAN4pmI9j(>E0iA5^M{X(Mg1X^n}#upFB<# zpO9H<*JH$l)t?kFea;!-gVJ*Z{+GOV9+$qga1&zm^~oRFUM7SRX;poR+3XLYF+VJ6 z{*xP%cS`FJuCJ43ot#SKBN*IN+twQYhpnqhu3+70VAhvK*@1m%iksaiWO2+iZlxH5Vb%L{Hwdo(%&PJ3-CSGI}@4K7viEA2@Q$8k7 z=?;uUdj!vsOg?Uzlu%Ai3tg?f?-KRX4rpAGN#DK6cQ;Y?f(XmL6EBnfwq~e8XU+W+ zKrZZkU3XQNxGTpcVtcRc)z#i-gg1QCk-DQDikGDoc29JlZY$+QgMU2ebEM?>a4ESjU;yJswL$~ zViXsbUu1twb0mg~D~cA8c5jvc>h$~jm+m3385}n`W_@i;U1d^3nDxK}Pg?G4+2buu zS~8>qjb_3W)j4^(O}R#JY5!;>$vxm@O3=NXzfaFK??`lhz|#{SVj?QQj@5K=4G8D` zm6;SfRy~c+$=NL~G=faqpS;UNRBT=<&n`(jqMBG4kFLqd(Cvq6o}2hy{*n2pghoNG zII&CqkzrINv8pgNk(*W|`9@_{q}$I|mT&LmZ(&U0l)ND0QE6dl;tyIz`IUoVkC2A( zgtLz}<%MyHyH883zZT?m_l7;f8-C1+%em4{Cf#Vx{@7cVlru}#p{7Mo=2r1u&{9(_ zDladLPh63=Wh^V>EYC}%FLRrpueFuOPX3|6QO;0Sf&D`*F?V*bJk41?L>^9_iK*^!*sWH!a0Mu@ude)5X6hP1$^ zIuWY#r#hQYsn~e~D*5FM`VVQ?N+<&P%krp6`p($asmTy!+SH_{yxsgnaPm`%Q8C|c zNaFQ_#Vl1flR(KX0j*d$gBat3Dk4zhDl`$2>{pWSSH2sY2$-v_PR`Q2N={_cNMcL! 
ztAbNzd{7)Mkry3hTSvO8O-#5_xC{3y-G#@y?@Ii`bP<+#m()?dONO`Su;;7r5YH@7 z3Jp%6H=-bKIFWFKDUfU4o#-!b zs8XjjdMytvbptUjSHgKRE|=}<&Lm;3na3nf zGOg1k8yBGZ>5rPplWSZ>C8EnOFzb};Zap1R{c4maA4?!d^=23IAv9MCaAP%h54dp} zftNqma>+^bO|DR@QyYzbIwTztVFI;l06oiuQ<-&&>$=owTO>x+@LYdJ*YS%9!n?GO zo$@9xf2Kv#m_TGoePDX1l;3AeFE@`*T$ZKg38>ZXTOBBk!oG(JGe>NJ{+uFcU{fo%9iukbyZmnULu3sU0w< zKQjHqknR|TIx(6cA7NhJ)63+f5->+P((_b``0M?HY7FJ`H8lr{-SZVSvv^&S`Tm>M zf`9VOXu<$@wZ*QCX~5!a@|#;Si(&ukNJ*ji`P1N}tWYL^QRn#lp-7EtJ;_ZY*dWPG zGdMfR?Z=xV;O;B2Xh_Ma2A?IrIs4%|n@kESz!>iXR{F2qr>HNoCr?oUzcl4__hm*l zR{oFIb5AD7hH#H2l)ksgqL{&*m6hv%LiW!8a4dP7JU5>3i$z;YF8c|2`B!7|oSH>h zSz!c|S|XF0JMH|B-~YuxcxirNU81}^_Ky%RJg)C}jHRepDhK7Ig(1;8hH@41hTr9M zXf1wB%*vbocwdk#S>_`B7w5kam!@vKUC586rl+{kuO-mm`~L3 zkkCJy2ws>b5OMH(Kxp%(7>3A)4Fo_?!6q{QM~vXb%1robodWo3-Ff)xub8?#aBS44 zv@J#rLkNjy@C!=8&kLnJqITw+h@{JQ^XS5u(4Yf99JqFW9Qby>KX5|+f8aqOWSd0y z*Lka0^q9ivK)BWah$Ot=>^Tk)cr&@+Gbf0G_AeQ&Y*YpW-Iwg1pY&t?hgWEgo;Kw^p;Kn}%&}$Dc zBjI1s1taSI>3NFvCTe!fPUYY;^BBsitmm3o&JYmt@JL;L16~Tk3YJHU!aOyHCSvuP zrY6#UbFi2_F4z$HE@2Z-;nO#dq_w%rM~Kqej_oo&J>fc>;8s54R!-r@%_Lng_A;*V z37DA<6;|SKS(}1>UeoO>&1D`a3G@7(k(_K_6}^}kSc)#2-qpdWyqH5F+Tk1mkfm~X z+^5B6s`RKTNt?&634S*7`eQ=WsuyYT(SehOxU6En zGkMR%B0kw<)Z*{FEIPA)#f?=_AXB9bU22skbFyilSqO))?gC4(p3mQc%2npd)#)<3 z=Cm-TGLsoyZ8yq{B7}UNXnwp)SA>KOihwU!;o&wWCLV+e7!o(^fjywzx4D7Qm$o?d z{)(W%-AISaG;=g6&f&jZz@st~%N}#0%1LrM1zx>o1tng)1v%O(M4q??CuC3Ra|{;2 zUt>noyQ^j&Y*yBjfO8hjz&yT1M^=-S8f6jN!haD97sN^+gbLG_OXla$m!xFyz zp`#b*zOYd*WRv?a2A2D?mHJxDA0YoYOo4~a>4hCYNAThM%fXM=#TV<>610og%N)*7 zq){zo156yeijMv8exMtUQoi@SCkabIazl=2K2^iJs`&;W?=1dU0m0Fy?84XHoDiZR z!mCRI8ivG?F7q6gC1ekh7gH+ct(dt`Es0Yn*zieTz!zC?*B#GJBI7hH0gqS z+16gRZQHi3y=>dIZQHi(y=>d|thf7{KHr(sKl-|6=Fd}^Rasf}RAyzw9T64xnFx>> z-QlbphDaMzrpWB*tY%Pw8FUoLfEiX)2?W!eo1s0VbdZNVnb2zq=e7Ja>?t)9)D`+m z=4tEEtXAVkl`uz-SWPfhuCN|Hk4?e489lY%_>9Fs^&@o==ZTEg%%P)d+qw_^T6L(`o-+~EN2QX78rV)k;PP9swstI4wNHeL$o91&Mu=Y zqp0fOGK^{}$Tp2@3&k?mq4kS3RIU9$Esuw{Wa_J}n-)==qLDtsm&D~=cilV-#w*(~ zq{gJ|KuIs1N3{0lImHL2VgJhqXl_YPGqg*ZyKCR;vs+KnLb~awf#=zK8QK+Du*~ys z25iUvr-4VUlasI6IPU0oVt1H=Sp*IVn_0>e&Kh>CJk~kht^5{xb1$i!VE0*w5-?&@ zamUQ9`HI>1U|q%+0QWGblGB?UqY69X(U$#Mb^#2?o|Q{=nfEH8EhpO+R0 z)L+=uLErDTHdJ|mh!SM~?|!}we+z*myo3bju(g%8-K%1`b7?Xb|B37n#;kEp=J@%1 zOC-{NE$e_p8Z%i-!Xyb3DjU^8I8t_Deyu}|qv`8lK^D~eZ47gkwQ(&cliQBvV?3;( zD4nHByTclrnt-Mse|$!Z1|M6M?dma+9BKc_ zHHh{@0*Oh9^_@z00#1?IoWJE--(i(&Tq@pYi-gd@Z!wYVXY`5 zI46p1v(Y+hr8Xx7LoKE1++0pr2RtRRZ<&PF z+htLj#_*)|U|0Lhl?!Lns?CdfA!Ko)ow`C~Rm7{9n~BIvBISjeo3jOdsmgBfMt*_; zam7q4>CCRmAY>C69m(vq^N=#}{zXFc4bfvp!p7k`d_B0Oe2b;Z?sJK`>n=A^tSaLi0x%R;WrJtMV#tQIWoLFH>=)Mjf+YUY(*KNug9-klYZvK+e{x zh{w@>j`_(2PD>){!stGFR`HfXvDwHxYo5c+<~rM3+;qO2;nnet=S?7CCbOUPx$HJd zq7=2rR*zz%)wyXP5yDY2p)A~-gJZrwe8F@tGL%G(9SX0ZIe>fQ5>x{Fd%bZWl*R94 zEz738L3e4xJb4@PC|$(t^mih3QS-*t=sZfnUc;MX=n1}Nnbay@U|;rJr#wo2zTto) zb+zO9w3#v?B1JN)KW|}5_3udP!@KtM7_F@iz0CDQYQy$df{n4v0u-Ezr6#k^-8*o8 zQ-Bz+WUm-hu_CMG=3X7hg=)Q63*UEI@?!Qcc>%S6gOxqL3aNo?AP*)>E(777wXXHWx(ynVOnHiK zyPD)E4<@qp+!0?K7))rLM3o}J2#n$}QICCp`Z(kKaT4b0)RkKIrlT&+lIau8Wn83l z2ho=edSe5Lv^u|)m2~R8a#pGGtwtC7b%-vDSBarioz;1yi<}k8Hx`M!qUFaX^C)v7 zW*PJIr^eexvkP&F@`YB%{duJltC3+VjaCcKQ}J+%zw{Rd_F8kcK_qON*HOjNnifsl zu~8c3Wfxw%4K}>4bXaTwn>}LMD@aH$+cC~3DS4~K-k-xk+{kq+lv1wJJ;&!-+C+H>j=! 
zm_mPRE^pl@=_uvuA}0=6h11?PFcE}Mxi-c|=M&UO}2{H!Ly=u|XZ#yhRWmtZW^=XOzs zTPJc6ktqF<(vVD8DVs$~a8p#!G!aXG4}m+0f07#;9I~jx>?GZe5E>DSJ?@i8y1zY2`iB<<-C9{q{A-ef;O1;0kJAJB1tgKFDE zzSKdp_aNKoB0P99=iUSpv`&ccl*ez0$?vKXw3a+SlZI>i{5z@@6K~p1`geM~YE8XO znUk``+~iQ@vT~z5D@n7d!M?&-#d`6-KX{J{nNM~1&1eO#%8DT@C_iq;1OZ(#_VX>q zvJl3#=MkO}vX-HK2}l`C)Nl{pqd@8hrEx->#G8FyTPQb5&H#J^_I#8`CO$YO$bzf= zDDMO4vBgnlo$JRAGx^u48t!prf&ejezDQFkJ##CK^pww}O`>857Ts!D%?GBXVJ_0? zpTIGOaY8}__x&Y|-ItQwE=WVTpz$939|K*K{aW#oPhM6BK}}v_XsoyrOkFU$$<8;HwvoWmO3Y5=ktg%LM*54`AR8N=-(o?DCix3X)g|_ES#`QR@qv3cRlzT zEVQuV-ZD4l{~l)^%?mN!!I%ie3W&1!UyyspO;{v;+PRDP5SGAHgSk94Sj~^TC2qUzOp+#3 zLE#qVW)~x~QqciQ`P^099+39=-2PieZheh%yu7!x4T9Ch2)x6To&4?M9Qonun0rcNdn)5p~5ki*O_L;l<0>VB#dBpL;6h<<(WrE;Q zqQFiH02CfF1MNPiJfZ2t9fSlTqVK>%daxeA>^}P7LE%DU&_rBJq+Ip(+1g*YI1xP zQk$lR1fWuOv-w1$!F)K3wn+284B@H|?(ln^ApK$G5KVxp4HBSWVBfU(^#xwNsv4&L zIMDrmu{-R6<2E~!Pkut4c`Nvn+hLY}j~G6p4Lfv6c*U>57e=UZ04Nbqy~D7-6Dn8t zS0k{JF+bnN+vS*}l7a)Er@x<#@nj;G7X@mY0V1CuP)-hhoGStkU@4m%!uOXP!5mM8 zeF?)N4aN#}>;NE7?f|{}4U6nQ z7E2hOH4BQj89L2gHcpFxkxoAHA8Cw%yc)>gTONVmUh z^_3AzHq4&wEf6*XIfmMSJGd5ctJmy`8EMRS{z<*i*K$0zXW@A0X|T3 zy8%7|d;s1O#KQmEtaVKPqpWqztpA=`P&;{ z=~r}>QScB;w8QAStb98RN$pO^gZdj0&6ME2C1TCoIV#e_Qd($2PJzgYZ7mI@1f5C= zrQK2iWE!4ThZMxf@FP!!1-y+?rh=B9i;ZW9$1Ng^$T{eWm)2UB=`nn_txyUc7^4`~O{EBOB|B5_I14qH!UuCFEkK?TO|x_(D;r1=Jq4?t$}w`icVR3#$9;|Gnvix4 zukyiBL7~=Bd@h-7(0cf8wdSspWiGlzu*0WGi{fBPJn|JQRAFmrT}GqxABv9h(XHnw)eW1|(cv9z&Q zwAD8>rWG=FHZwF<5EY;mF|%|uwx<=b)OR!%GB&g^GNu(XHZw7G#G_|qq7}E&H!+sh zcd)?wqi^Y8Oe=2f_|q4yg7HstGiT%f=+Vr|*wNn1@IQ{(+uHoo5uT&HlkvY24C1lS zF|pDrIvF^++5U9!PlvQh{}k0^4a|)V9cfj~j2unz=$V=ReRMCgK4X+r(MP`Vk$i4K zFbbdqBGT8?kfMT-6U+^e^1Z^C@ewUD)GCOT#epO=TRvgg z$rb;%0yRzg@9&$tmBAq>RhpQn`!!d_z9H)wN<}F1U##9&mlHPGowj=?ZGtn|Tg$w6 zuGhBL!-kqHSgRiiDXv8~w_#Q_Fc&y(Uu7-cJ}+6zAu|gHxVRsjf}Ov?kz?8Xz@49~ zIBn0*z806QzxPPpdEe)#D|qfC!@KJGlW09ZRCV7sU?~;?gPBT@+Q)01gY-BVf;_oh zuXmetz4z{MJ&1fQRf3Kj{lRh3IWmNu@F7$PMLBWZP`(HQej!NediW}w#9E(zTrQ+A z6yHnKq9wiD$i{fzM|kOe{auRo{f*XM-CH&3o+v6&(NE= z>&*24Dd_Tkl>)n)xl^!LJ&7=qM2YkHkkVZ9^3i#X|bkBTj-C zHz=J|Kv&&)RhJDT`)uixXR`364p8n7BxO2aRKI7HQ5*m4V^d4$Jza0ZU1xdCpOhVv zBP?bDMYiC&Qys7x-z+MGn_4B)R@ob(H~q`;FQngcOQ#VHBn-c$c$9e`i90|V4oYD-NnjAN<8n+VD0l|9>KuS zzj|x_SlFsJlKfVXvreCkM2`EtmN2Ds#_=$sF8L7`8k4}^yKoBXSw%acv%BN~1mMY! 
zld7Sl(!Xe!a@TcDfuxf(Ri6z78^);%6G*oN#Z?C5?Dv_w^n@xHyzRAx%QJ!pNlaA( z5LbV!2UpE=g}Lyly?~77w+pW>ynN^?jY&!!hEv%{qB96{BXWo+1m%Gtj-gQHOnB0O zWrb8dZp&J^f2C%jrMfAlG&6w-x-es^x}7~xKoz3K`BgHXwa{0%?VqjQ{bD%Q&>DV` z#*UM#U2jC8XF(-WP$gAm#F%(d#huN(pXv+t+}s-Y zCfBSQPsikHxosLcgedHd9kd0}+}Ym3(?A~87e%8a@@x4jY%cQxMuUsBzb2*&vtY`L zmlVzejbqh!4-Jg;HU%P{2Y(I9VI+z%*@3ZC~8l&d`UHcgR^`e5F5tbd7If4L)d*L!-xihil77HjzQu==)8nO zx^`lNC9ap5yg<;zJj0fNB-mWPJ@Kr+iBk0COZ7?M8;XZ1{({XZq%;4?A|UT)&d`Ca z)X%E3Fd@A->%KRz16G_C;4Xw%1p#jwLp6-Rzh9wlMW%R3Yf>yzlH14$LiSnv(c0u@ zxK2PUM0gqPkHN9D;f?wMckM|7OWJffqzVA%q=T^_1;;0~8#UbpW(Ky@tD0)#+%0Eu}3&RO~TJY z3-25ml|bQZYtkqRrVFB@n?m(OL6R~mx4p;U{GrZ!22QQ5I=5bh^7N$aS@xX`Q|}fS zdnW|`nJP)_!dfM)R8+#GpqhXZpBmWr0lR7yz=At@R?(xn6o2dBwmequVVuIX@B)lq zxha)$tHkB#NB+`=TA_H`$SLm0j4@Vfyh%P~Bq50z&BM;H7xBycK)0 z+UEPrDouu3fiA$EtawtVe@SBl5Y)n8De>U^ELo!z6U2gbR7AWTV$_$&x@kEzr9{ch zY{3osPO)-XR~3N7JaGPF1a+^oOSC}n`kyDP_nH%=B5{+8iGtbv1A~ zXso3l4IZ#V7wv{qIEKs@L+&bl@>yVV1Uue593w#`Mz(abVFkyrUJDP8)%A09x97r7mGlwM6}2E8Qk@L@8K;=r#hO5KPf8T+1H7dT-KyPY8~U--;>% z7y0flve1{sZVeUKW6PM?2doSRLIKZMQRXajbIO2<0kG;9&`X$nXb3=f_|04JN?GkN z0~BqS0Z2$Skn0!w3DkDvwhU_PrQcSb`I7W`r^6Ks?~;Cr;BPG9Uu(YA@rm>DB=@Np zLR_?sZ@5jP4ZQLzjlotkV5&W{u$?-|PgO@T*yr1y(e zqotB^3|uZfP8sQqN(t6wyLnrQ-XRp*X6aY43d(#n+P*>?tZDxwQ+l&7csjkBy^<0@ zDx#=uQp+KyQHKRVOe%#@}(048%Bs_B|q_1+R8G`L) zF*>iE$2^C=^8QYH-VA(QMdTe*QPynZ{0;M5W5b<)D6wJd6qJHOHM~SZ{TOKCLxQuH zU+;Yn9T{wAE>k-5t$(N-V&srK(QqR;2rqA`FVKMSx$THx&C8al4BcMdV1^Y$5mUp= z0He?a`70k=*T`kT5wR3%8S%?6tl{vY4QPe#1}F4cN1pj_e74GQU=gMZ$`f5ODBGxZ!{|^Hxw{U3Xi2n| zQdI%AFO0zDJv%usRJ;8_&XC<}&00oi;U-R#4spDevi#=NU94+KMCtW~C$Q)p?F5#jX->STo}1RQ!r- zAfaVTPP73#4e8tbJ2{c36FSzrOo3vE>!U?8ZS}rn43eYZH|b7((p-p`CB-=x4Dv{+ z(|TFZ0)F%?bR~N<5;osU+lf~3DaTh+Oe0xY?S}?JOUwHXTFb^J)x;samp&7exNVvY zGz_5n6C)0HS}*fJ3A8gzg39CMlmDvZL-7kHD@B^`Xx`o{;jl*z%4juy(y8D_X>9pG z@dYk~M|;r83pRq;QTAt$Q1B_%drL-W6ALk0FS#LQyHzH}?z^NDVs%~h>c_#)Hv1Bj zy6U#-QO$eH5=bIVYn1nmKN-u3K7y%}O40>c`rt=R{k`Ncu~3^E%%s25Y|SI%w|>*C z7*qWi)C%e}UfHR1#;59jcrpUZ+?bl3h+dkC8Im(df`ThjXFpUhvGn+C>FWNPuC1I6 zOY~W3a)+5{8k6TY_kiO^9qW1xCSi*bQNF24y_h^>jUO?P{3w%sj}$)! 
znW*C%D2YRW+JIVZ^ffigyUPf$=@lP*WvsZ_vLkOx^gjLk&X;9el!?UXrVxl{n7gg) zUfd53s^5vTC^n+9v}sB9dgVM+Q^>JjfprqV8l`_cSYTa`L%@aWAVg;810)FYz3zS_x;Oe+NsR{`HqPqzA&(W35Auf-%u5HZz|EKv)1f*Xn~*9z9X2Y#J`l zDuVDe!*P40+k%d@e-?k$Y}ZQq5FE*7 z+<@w;h9^S0RIWDsyp?On1?*V|mq)_Fp^K|?bw8Txtk1`#>&ZX5?ax^M35wD3>~wh< z0x~HTIlS04`ge?%8-euA;dC0Xt^M2Mb&nX*=H(^kd(ygPZ@`6Cs`G=x_)L21G!+aG z0aCB)dFIxoIx+|M8|}Ic@dM}S=JmPfq5t}$r)%5shV8Ke4%k2C+T&tA#+}^IrqS2s!OWsmP63Frv-E0Gv~;c7zn?3(`r7u-O1n( z?xRMV=XbMPYhF6yUU0M4`}_^e3{IE#w`8uN0I6D=_xc>K+()J)c5obM@S54V23irNcm>s;Luq#uY-Uf7=3`L(ixHC49Hw;eJ( zEeoX0FBk8tO6Nex&%Q7Oeh3=m*skLw1R*$VC>@11z{DCU&>WBFe&lqofS~#`XG@}N zMQ4Xkk}W-v<6GDiNd27~quFreS4KPROog!2S)pGD%Ie~0_f=8{$hlqtrAynHSv)k> zrtY+(F=RRAwkqz}Jq$Ex6W>&8;m_R%8Drd>F2)2p6c`h%EnBTLB{Ep0dma@iKxVDI zaFHb#-^~I*C@hu%Cn2xAuTQ!N^g=QHn0?3(a2!Xu{uoXlp9*Q50z}4q{4lo4__5Od z$RdfjMF_ZDPU%DN8}jg*axbP4A~C42=faTOO7%Cw^l)?cjhm@78X& z%5pbIhX2TM!?jy`_m7!lHR!)sp4T=kU1m%f2#_dJ)RK(%5+Sw&8A@-WAIc*t;G5xa z4BHgu)z*g@>Ma_)8wJwCk^Yd2zW`^ZGC432nt>JXe)|DdUC>lo&4$~l|5lL>un*FV zbv&Ug1|E0ti$_4fg}ittl(Y zu~?B7xlO1$3SuRICA1T()+O>jO@4KH7^{a^qP}(#Z5Eo)2!p7XTOedXMs6|l?yhsE zF(tU@7Oe8LMv8k#U;>5oA~{tT@NR4(N)9D|qs#j+=&Nv~TWHYrM+9ux!@9Ib+2I5&Hf1Z^Mm zY9YkssVR^SJ*3uGF>8Y<9f2_jkW3SqF9@j>4?Cv@O@ujKsTV3usE4=MYC$m@bCnM* z?Osf>_S<+!m5SnAaW2Y6Qgy-V9h5u$E!-Q>n4kb|;8LVm#S#d)_j6!n+ljE}H#-tk zjh!!RGN!zKl=!S3h+ry}WYk3Q{kX@1z+ZZcQA%vkazRKHdq~zojx{_Ik>e#&DCKH@ zE}Gz{tBT4bHYsKdiT9*3=Cc9xnk+!3zD*01^0|^*u%q|)c)Y|s2sNCzm6TsRXQ^_( z@}d1i6Y>n-$u^A}45Ch>i;oxneh8>XH+DK!|28otHXLlBW0!N^8SQ5*v&s1F?-vOt z)w@A$t!pF2b*Pt$ZRB`1v3=7WNIP`<=Uy`%>^wH39?WO{I?hM>FP2au2QWL*+@OH^20|L0qY)q4N$STt_MO;BlQcqyn( zYaV}2M$x9bD$fSK&-xt`A+o6&1KK!a8`M>g2~}hK`ium|xkL?0s0972@;t7hAg=VA zHoDbOVaA$=8f2yd*Y33F`Ab>a&f=aC)n4vZsm|1Jy5mm5!SaF6&_^SE`9=lQU`AOL zPF7f8jbu+&bnJW5#~~_a$fO<{REGn2cp0YznI!3U zh*Q6dB429u4k}OBu;wnN7`rI%Oe5dvRIAnsuZzppt1pfYf0F+({33GV5WWkeD_n1; ztNrMYVM>7aMHxeqXN{jePHpiZunJr+fO2W9s8D>fQDM49QD@e9cTH}P*omikT-?MV zu&`lq2Vm_W+=h&DC_<*CS}ci=vyebJZIMZpZdr-6kQPj$U|8+3ixWOp?CEjM4`NDk zQ7oJYP=^dKcTtQOrhM^d2a+PCzk*rGTJb`FU%qjz%CvGUjfc9X){f=|1i91>R6$~) z5(UK;2U#q?3>Yh+WpN0T0yu1zk44@BQrVx&%91r;@o=v4WWk(`IM;Jp1s##msutnM zVKX8%o)I#6Q4T9NdXsh`VHo@f>9<8xDKc(C9Z#2~c`Ix_`>I% zV?{3A)UjC#vzIn%ZT-e1m)L>7_YR0S%YqI(40EQaqV23(S~0U^6?1V%?)quhT`=r% zbRrC_(ZbabTkg#u*o&h@9HIZtp~hAm1_iCY8Q6V|k-wEZdCVL?6Z+6d!9HW~ghZ#56F_I@}Dh^c@1@63k%=mFCN7US3quGi| zm<_J=S^7RE@if0bjA%w?v3TO8rW43$5rZ}6>jt^$vI%!Om`D<5=DY&B@@;ez+-P?i zqa;Ut3elrnz*gwMzQe-Ru%V$&;OS#oFQC9%GkHD_snUW1g~U~v<=1J|C$;d>+k&wh zJ0kGt4N!J8WTu#SD3%dqXPxQpR@DSPdLm`enp{)@+f^?M{A8pV64`!@kpU>xnac3e zD>(2UKXhUC7=D^pIq7PxCT3E2bjB9%rVoFFvrF>In1XaBW)mMx@DgCV%;h#id!TGs z)98r7)0#+58zwz?J-@t?+m{W42^$1x)yLD7i7t&;1t^mhP3DmMoPQRO$;^y-o-F$q zV@_*{&pezaV)LpO@aR6kSrrm&g`9p&9`|F#?u729{Jb&|W1p;;BgB#0;;HagI?BsM zd+r;jebzxJ(C%A=af!c!>AdC>?M>7BHbI7S+D0+7`l zs?yg*`EPd_gc=Y>4aR4XeIKqB?X!kqWA!yEe2kQ1G>DU6{&+cB^hs6J_PVug=u=nm zSaiA+1c*$TN&6}PPkT7}8?I&lk@bJ)d-z|T2SJ^vsPsM=_kDi_N zpGG`9v?4#(``IZ%hsQwoj{`)3R)qei$o`*220R7^`u}YC(^kR8@gMgD-G6!@^^pE6%r@ zOknc+bDNr}(+fMNYBHBDz1+e0;G{gdx4Al&vsogtRNFT_7Ef5dJJ&d^aHuneTH2ZU zPgiK*Fc!G!t@M=w##N{qClZdXmS%LReOC1llptw1D(hCW5G7^MLwUf@?4LZw(19dE ze)CardW^9v0W`tM*siB_bTFH=Mp57kLDoh8>0>)i1TE=0ls7V@`*ZVTDKr zC+;!!scf%;8MHM_G|;BS-i}|T3|itm#8H(g3k-DyH|>Uk=SDgncRbWyXvue7*j2Im zbU|%sW+5O(KJM>1(pH)eR!`CL$65Oooe+C`Cti<|H$i@k?PgxKnf5QzX!fGGeGR+l z;|}xF;3x5ZshO`htnlOpuSuDu-*n@~F=h8m^t3K$)8rm=jWAfL!61`eEfzLa6bq4A zYzUUGR^k43i|z5=zVW@F!&0l0E|YMLurT^(;U7)4OG%@ar}6RE-M66_S(T;8Mo=Zg zv695Vw%7O9ZHoXCm`6k7xLU^wHOF@&x@=1h*MdzozE6@k&)Rln`1SW!G1OTcS>mb_o2Pa?Jhnxz6$^Y`7BBEQ 
zr|ENz2qD9@MJkzA@S*19#hHLK;cMTY9_(8=M0_j zL2VcI5{9WgJTPA2ZWj5{6zdQf6JA>o*FoH8$)3yMEaZK0I`TaX6y4TysCMGlT9>H;XiQ z^F6<;uaLZ@$X`DN+RW8*Miea>gv3lmv@my-s2s~9uR9_s9^}AS!if?*6u9_X%`o8v zz_hv$3p`xHJgQU8AvuJ-oR5O!WQ_jUq0!{{tCqn$TeuJ(6;Zqx+=a$5BDyfePHEXC z0W2B5AnyGmIYK2jLHa)Pz?JdxDw>~kY)pS_KCIA#X)_AdAf6N_Y-XQc3!TOTNp;>+0gv!_ziTK(v*+g-A64fS|DGSMjztMm~ zHemqBrXm}c(HTuD6+Jg)gZ@}6+-1=T)3@rL1f^HnJ8;zgk=Vi4B}$vwwhgU)00K<8 zmNRMERYfp=5@o7*5@xe%sTP2A=|-xDF@;A+%GJZulzdl?bv_I({iigwcD{Wb1B$-A zT&6|^-l0MP99U4Q=`Z8b+A^nF1r1LxZ#&1@4wpF|*V>oUyP=^B-Dfme7r5u19xk^! zpKi74ZQQIY9jq=cua{b{hbdT;b^I*5`WX)tSwGmOWeF zcKstXezKEL8QzSP1 z?h1#Ex(i*uZ`dd{KLb%w(JOAng_b6#3@kR>OR8<0mx9bO<2nBtXrnW%_M5aWvX*0? zhayObnadq4IH+iGDh`6NxKdV%7+>nh5@kS*!L*M|MuYAy8ui1zPny-RY0fH~rm0NZ zp00zjsPmx*bv+^l=RE%h#;Dow0L^ptezI9WS34New zA~p0Jfb zpWVOTXoNfzXmqd>SU6HJk?BMBCr^Kid4PubZ*D>#*}9gQwT85J@rRbB9rX?Z2DP8a z!E~A}^yaN=BLC@O9=%FS#f4JD?9mZCTuR-L>a}h{(QV1)zMZaR3h~?32#0=wEpV$E zkg+%Xl6k|{7CvzczZKlAw{$rx3ljT8Q}iM$NUN%XG3}VlS`)iG-9p7-K>350r?Yl4 z00=eQ6hZzg=vq|Fb|Y}WFXA=S5Tg_s0&$u^njf}Yv2YD=?NoJ+IHEfE@qQ-o)FOf! zVWtW{#(v6$czJN7_&6V=cz_x*H3$}zGbtVU;tD6(W0Z?&w@$=1r2BCH;I`N6UJ@i( ziic1gf`R3UnZvVIi1N?J<42%iA3MBvcd+PVtx|rz-`O1@;8_JPMP|ar5_F_imV^SG zCkQc`kjByz*vKbwVvvu8EWjQ_3BBB9FaWq?g|V{b1FI*01oDf-#6abxBtT(hB!XvC zinGp-1#meIMHbL!MXxd;K4-J^CF7o>4KrqqU%Z6e&dPO65FOdDF}rDCM?TA6pa^@a z{}OHNrdjkwMk*t)_23|X;k*^dEyqGn9n7k>PfG$m$|+-%JF5XsA$4U}FRE0mV} zBp7^V5dW)q=5I-~D-#;%O$s0r`Zwaam{BEDdDYpi$sO%6duy!X859N*lx2;=5w`^( z+aw>e&5@*C(1oiVh2~<~sCnE(FG2If=@m)k2HKsQ*HZ7L9N5ujp!utoHo2!}>=9rG z7ALNLV|ctae!Kd{Tn~FEYYW^q&94pLU})Bi%Qr^#a3C1#oGrFzYN)rOXV{juos@sC z8%=yKdp_81d!o?x=Z0IajpJKO{syh;c>`@am;QBkyywA37pBKYXJ^AdW$$Z7kSJmJ z4nyAoHg!EzWaDp|jh}8_D+emG5BpP=Eef$xa8HJRH>~JC%(B}PH$@UC*_(ey(nrTa zULw(Dnn7w)zbg4i^8IxDq0DL+7=Jynn88brpw;vHs(ybn429-nD4YvhZo^2#q0h;( zUMG1Hc5U3b`;mVJmIdo$J>)!&Fpw^LtwS%M57&_iT6hi)QG5=r3}y~4UVQdn+n+kk zq)3CNHj=LuHdchA07JNe0E4})0E3#P*oh#v1xI>Ny4}c*wIGmQsl|QeJlR}qPIDa#QqF*EKo*iSnL!scS}etV7jxbipe8xUM%l7~)!05r#oI|ZJp-b7@iDw`@v+`Maq(QQ zk&dQ%2kPU#6s^pycH1Q<0l>ZTq{WGB*>}Ubk@dm=OufIBhfB{&U#mhAVfKFgY&9Bz zOP+PM`$PSjmdznE6{viJC;lS~;liba)hO$CaPfjEeK1M4Q&ne7ZSIIG<36VKkG72x z9i~DaTrAy*k6>7{hQk3@6PLS5yye?kNxIPoH{ocOAlrJb=u8s`T}AcgRwRB!u3evW z2LcOpyBDHXMjd7z?sk!&-`hC$rhk?cVM98%Yl^K|!l3yK8>2!#8@o0yuPdO&zr?}d zXp%5|_BES3GEOo6h*JJWqP|Q(sYDZ3p_9O@*6Wfg_m^L@1vxj4M~PGjY3K`(RMCjz zgkOl~M^1?M3rL8U>&wkBgB1?EkYS>UiaGy#m(&6VE^wxz7*rebiQ!_ywe<~60aPwL zn#UL+e-c|G!Y&`x`v++#Z4q25-vvEeEn0xsu%#;eebdgvHFHFwRxV5_xXQ*O+Wyp?^jR=i&tY?vL(&K@GGin2Q)45sGGiYuEI$-$ ztTCR^;Ay#UvTcVF2JD-}d1p|%r(dd9Xn7052KFe)0W_bWqp~<+@(4q6Ka4Ch^SY#| z&q};e1@E<*mA)N=qZ0?GjPBq~_Ncf*(2D|{DOeD(yr-g<>PR2PFH6~~TUc#E?B*6n zwri9$Ze+IUKng_NK|TdydeVqmX9b)jGBB1d?`LG9<`yYUFUU)r=XNUYyq}rHlwDog z!fd#tvwu_F2v#=AH+F>Rct^p-*W5-3Hw-L#vR#{mV8VJowykUh5!YvWloR-MSB-uy zPKjBxdSGZhgIl04Tt-8vPq@b!Q5Q7sYAj6@nxp^Zh^p5iQJ)}lV#6%YVsfgn@r2`z zj8BB@BrO6g5i1K9SY&!^xt}JBC?i-cj%!Tabv7@5-)D zj+DoA$8CK*!loO?p0fVRqDyd?Fr0eU&&cpJ(fgQ$m&i*R{~*3DNkpYSQ7t<1|T%x&|-c^9p8N^Tz zI@8my?dxTib1VLwbU!olG`s4>c}I(!2(MaSI94K^+4qffV=c%L8&IE~#(=a;sDVk2E4-g|mK3rV!0b^%T^)Set{AML9$- z8ILe_a$+q3g+$@RXB{JvHHZkY@cPhfX;9J+u>!yt`!b9)k1mulmAkd+`z6^ZSwoxYr_SS83ba8pH zzud`ke0-O#Q}#bg?7u&XU(sEsS*^FdT3lb>l%GA!x*q@-;tsz)`#pDQ#Z;j(hVHtZ zrA5<>zN$5+hJ3EW&of+VXuhCU1xUmG8*KP*0nY!04O#ww!G`~Z0)MdK|BV9w!G`}r zfsFrS6!_2W{bOGI$3*#m##&1mTbukuKQl71|8ErRRa)!T8zP8P4c>aX$8(@m?bj*7 z@V&Rj%K#TnRMbyHv1XN6ECG%a5DG7)shU~?}0@BABvs=3~m4ZNQ24_7B#jQG(~mQ4~-12x-! 
z*j8R9A;=>=WB@W@Y*U_1X1+>cTV`m#y`C)Pzu#jvmD@aey0{$IX9Z6E#mds8^04XpXF)#iHoV+D+@359xu!5@prM8- zDWQmatLlS4cV)ruVXbG4%p?i2c?990WtlEf|ydQL*ppGSrEE_6d46V~4`vGtNEoQ7eE#Z(I9Qd9^A z$x7?Q1GxqD;@;n3K1BuvlbFTkDm~rHRM0+o!yge9;7W{1MXYk+H-tM*_HuGk-eXhO z(N><`oZx(z@Zv#g{Tt-Jqsk@xrBXHisjInU%QDuVW^Itc#%7z&9OsbFJYy1v%*bDZ zPK3hiS=UZo6YY}fE6X-+ER$XA8xjz7p*oikB0VvaV1`a8R6-5mVabf`?-1pR>V0MI8URw`w00D*AU5rv6s!ZV{5 z3h&4B6t;8N_KQQ_JR7|QP9BKzhL@CzTj@H;KFDEW9TKN0bB}8&Ws&^71GU?W zw416IrHiYgd()&03CY3MkIjTnQq%0_%Q}<8kORas&o`mY}pVJTtm?*$26g zoA<}=_Woh5tl(vMKQj>EVrW+K8&i^106?Thx_C+V{3|A2{=n&(_Lp6kj{5etA^Dp( z>$dG<$Qf^4zqxP=vNWWdiR<;dxm%E+OagzTjQkA$_~B(ZXDrftMml%Y{_|GNYSooY z9E_!T1qs6^)UCR5pCXPK{Zzlo&c7@nxZ@5J(Kz=f_FcYeLd2RhwCnvvB!EtS-7+I|*0%)gf7B`@HzBvyVYKI^HYwr&P zY=aH({J?GI5 zoe)$i*22mS+-L#uJoXrKJgIe{4;32rr;7;AhaZa!)JKIG!jDIX8l=*?fPus?+Y04V zfRc9!^h$;mBT<1CBY}h!^X4v?J$P+J76E(WW-X30T-XC^2Yja3)rNFk4ISsT?oQI@ z+uskdSBl{=BoYl!>X6TwO)SnG9KPJw;}{dDfBU~!`>NnLqAf{V3>Gt6%#y{-%*@Qp z%(7Urm>Dc)W@ct)W>$--S8v|inc0nrnE9Ccskl*byQ;gRsxoip$s+=D!#>|QNSNiG zA{!8jmdQoKE%>shonZkGE?Qe!J-sk^1)MAvKebM-uaa6U&Q}jl<~et2weq2oYn6e8+zDmUa()quh^2#n-5l{7Aw&Qky|vZ zSI8|l(eHsj5gb4Uo;3>i7COS%}K z9_>I!kf*)JpONLj1;(XC1F)X4cr&2q8PYwT3!ty@vf-U2_&o+NSK5(xyq{z==8)lW zq(%=wX}^v;okr>q?Db09ICMFM%x#KSDOKoVz-dg@uNMUPs8NW{lVY_IBhxdrRN(4^7c@hqkfiGg+y6TuKAXeVp6rfFsFk<%gR(i9`>17#+9-3hD}O zrrfn-L+E0hp9r+aAPa1o?i^Nva2z%=B0^c26)(OYHlS~VBB#2Y<<0;U>BFf_+Ga|y zg`pPmJhB6$1M<^|013%;|eBU~)ESt?GicsB=GBPgO^1xTqmwZ8)7!N2SPIa~4z%wz4Na z9H!#Yekpdnf=b6&^a@^Y*->Ps!p9Lt#yXOe1(F`Tdq4sXW{1Fz4DN}^NQq~W%>ZtU zkx$+`&K1$Gq;o|E1&4AZ4j&#Fr;QF5hZ~PY*h+&<#6iL&>Zrkf0fz)NEQN1o=kd(A zF6%N**&-j9A2e%s?B1++QYgYB-cLE{t_nd50VXY+T0LcXw1;+H-rKSkN2FaXT;7Mj zB*REv5h2r4?pkTI1{K03u+q)6#=jbkuon#3QiC9-s1HGyD!Iv84m29uOT$L<`5>sL zI^*zk#|V$SwMB=!rHjYf-K4?Z(;;CRYX7(2x6h#fh!YoCfteVIikleep*|x|$^(u7 zi!|fup+0?VK#0Ep%IW1zdygvoqa0pYHOA1e;AFpqeTm`h`{u|3B#UM=a-i+dgwejc z!Zw(%rYRRK(NDIY^~$d&Zf;SHp^AA$Hty7p3qNERF9+X@525o}pz-`wBCV1GxNRGT zHhf-eWyMVB8~e77s*5AzMr%mO5)34y$@=5@$;vtx!IK+&&>PG1n9{~`((a#cqT zy{IXG)mYcPocmiTOhTtv97JPhX4%#$Cf~h6{JBSvQ18YUdTvBFVBz+m5MNd0_%z^5 z>o%qBwPJne)Xdjh5qi!utoMa;eCx}T-C7>5;Z%=Y;ZJpfe(l@658|A;sBG!%`nOMs z(l`wVZHTAuiEXQ__GN4DC9YUQb`cr$j0$7>{B?2_XH5E}v@SXE9NO}Way+pfi>lLS zQjuRHmwWrDf67SIgQRy;ph)zNKeluuVN`85o0Y$eHWsyfqQNfPjGIuu%nt6~^4BJI zabd38j20JF{UIp|124(CfmTIok8eJqBHE~t(5$tURafS}G^`_6v)+c*!Qew9+LzZ| zKb~TJ9QkKy(@En;@`Jq(&Esu<`?9;yhH#2UU1GmMbFL&(wA=gtR#cd`G);iy{=**(aFyi+;(L|5wap{vWf{e_ZK*%TgJcI9UF(R#~aJ z?zqp2ylLjs%X{Xah&J02SRz?y$(VK_OO+;JUHnK&-ym((U?hdD(~(55dc6rjZwP9P z^0!lBkdzW~GfFfk5Mrd7Df?Vn9TGS}%apcsLzzSGn4!^&x$i{BRCx5}_*ng7`q+PY zaYoL13|J4vI>t(fp2m7#_jPS*)K9bG+v1(v+DL3SdT~~}>Tdmbx|r;ErDp%!1Hz{r zkWK%n7d=}c+n+%2^y7N#gpZv64PC`_NnUW2%mmD+Z$wMGm1cJBnkx8(rQ{DgZ=g>0 zUTvt!88n+VZL>8#y#Drfhc5#(dLg*4;lSxe}a zDOVBn^P&cox7v+M8!E6L4s5)^gcaYnosYJ=f{i+{N~qOOS1CEMCe1-`z%|*#ERQZenQR|1G>PQkJaV0Typx)Pjbr@ByIj>VYqb6N+nB9 zu-y1Q>)+-fJxp?FTN5%zvx-QYuCOgKHLNGXv#*uU)$ld{9ugLgia$V_^4e=&j0_bI z{&{t~nH0IVH%FiWpk|W~ZTP{}N#_b5C26bV`<}_3`P}^%`YN=R1%x|OL$e%42aN?SjF&F zJ|RacL;AqjR`mhR}uQMZ&WN?YcPbRWV9Kk0TGMT{DxAv2)N~iU&&Q$K! zW+jM%&nA@oq(DA&lbx+9SM~5p*24VGOXdh29Wc48W)&?LZ0Gj&Kui|%%vyjf28MCx ztvU1^x+J0$QZ!e`X)Fxnu0bi#ENOR5t=D(mY-_Ss-IwwT0bC^R6xsgUu$pInNFYM0Mel0bVFwv}? 
zN(zfYdDop>QM)rF)pQ9JXKW%q1Z*ETLsn(w3fZ8IeF_C4v8YxOR$)z02|KhFa7Y*i ziW8)h!K>!cyW!)}V2tU5P>eA4V5g;4Ygk{p_8~n9j~f=nobpw=leay;6^+UP509?2 z7HU7W2hPw=xD^>r$K$noW;y*@fM8>8vuDRlmZ3L~dgxMSQ(mI~{A>w`oVn&zWz7%$*);t>sVN zcVi-N!*?|Z1A(_lu*ERxTTQwF%HMabSkh6s$AnKlves^4Hv9R`W7t@TI1$KNxQ2>Lu$sV9x zvoJPv*PU>h-Y%PD`uvTFmtu_X6M%7F=^dKZN}X~Z`haZZoZIqAIjPjz8&FrV-H(}@ zU`%@~&g&9Ol1E{cs(?o(LizQVD2GbKM=)ZT3W0Fi;ip3>bxY0gg_6Y1(ul??_!Eok zqw8m2;0ycu23ivCbTGcuMpbMZ2S$WQYR)p<#&+U~2+r0?ehIBbF^PU5{gbcl-gYPDs6 zWcRIC$P!ID7r1QJLeX9^dH~iU&!M>TlKlK8h96 zieO@wUl(VWpDn%=$-^&h+d7+>SMS6T<=YV|FSiHVL9J~zUDa%hq+?Ip(|*slcoX*E zz>ofN=Y0u54UV`swftZ|+)Fg;F7L8_MFsVV9JBIYpN~)Vq8-48%?rO5luwoa0$%>Z z&iwzwONM`X*8W%Vr4Xaum${nt-z?MgUxP3CgY{phVbuEv%KWFV`s>KQOCa>h%3t2= ze|hPYmH#1_{L?|Fto)A&|2F%}doAy1XQX7}tVJ&`EK09z;_j^T-x#w0?d$#bp!639 z(|>?c@$gJFE4wf0PGrT?kbLwxV8dTg{o@x&^o%j)573ECr>IBp3ioZ1ce-Ry? zCc1jZmFujAyXgFYRC?I6J`Ej1Al5+m=&Fz=f6YR&@}5%06rQ{HXuTHh+`V3Ya{Fr~ z?eKWL$R`kX4H-1@2u5ApY^hxqROJDMr&>2EoLD6V81 zv%GIEX#mxiX9zqv^j(B|53t`-)Cf5^!`DZYCFw1+?!c{_}hW$%C+8w zo1R$RIbgUGNv`M;v6OG7l7C(@EA!bwG<^n5>W0#U-ig@wJs z7sc!%bX2a-FaIF3fyxF4#I84i$A64-Y{z`1p*j2VZ3kOBdYn)sTE!Cl`7C76{y|HS zNb>FDT)(XgC8LSgK30jHbKFU z%Djsv1_Obk>xO27Cmwm3OHQr7Ztcb7l!nxR>f*S8siI2r#WFUJs}hG*b<4N6{&+$! zyouEVEOoQd(o8?HL>*Cu#r}Fy0ThgyDuifHxz09PlQ2v!RKH4}?JHo#%?3`VzxAFI zYrc#5kHRwgnOf@mIU^#ArB8N5ve@9!2L6^#7Mar4UX+pqpJ-YdzU?;W^)@+(J6bsZP@NbB_I-smY>9fQZdR>aN2K)HNwr1+0vn zJ}+2jK_v&Hv&AF=)Hz?`>&1z+mF68AfKy<)qn)ffWx9%l%WCH4}3~Pk{_uRwNd}d_?Xr-LuLj zZcu-nMti~(sLg#XsgB%K?_>6xTeDaFuI76p#Zhx%0@_pHrZP0q_VWI7Z<#~1?*e9D z>e%4Ka73-hp#9P^PehvF5@NE-{yY2n5utxvcAN_&A`1)B(sFTBIMta(4>5Aqd!Cqj zf}$^O+ra@S(k6;3to1=J99|TyXjx&M8{nJHNTn}Uo;dRT5|nQ2%C5il>d~S_?^5gZ zl<{>3@c#DWIAd{hn1CF7WOP&yt3~SJSdLHH>`s14Km%u%@=Dgo?Y`0+?0N>9 zENQmwTm1cS>%oiozQbT)@k3>C=XG`0CT-HGb+n2C0o_{BjUM>RP>J_Ovi?-xK#-mH zVU(0Rc|=FRSexfufWzI37rAOP0(H=qVNw2026oyBB_1)~+4;7*qcHU3L_-cni;Rie z-@%YIAr|3$#vN8Q?iwF}y9dgH<3@|L`PJPq>U^P}?AUeVpICfxlFOayX4t#iP(w5L zms8c<;5Zw!)%&s9qwQO=-bW<;16+^Hv6W7kHk}V?{D$hB(y8&ODF%Ot4Y(u|RkoL`e3f@!8%JJjj1?$u zc`W;!xfDaM?)9trI73r%ODcd+qt%v@luSKfZ)brx=OoMd@QDE|qrlepX^b;>;wzNRz@WF?Qx%UoQzALJt_be{9Kd}X@qf?UFL!i0yn(icO z=^Sy@o0TVZ*kw^{;SXX zpH-EyvaqrID=&}XCo3}p<9{kL6A!OY^>J5P!~$kz6i6hX3)0G?ld)}(A?RpD_(N3$ z(#S|knKU(UPs16}S!8Ht|32Tx1${rm z2dDk}*BF{DYg*yUms~&6N!p_!p#JZWu z2^pZ&uxYOz%I^7;?PNEHVDI-#)5`Yr?%Vz3GwrqYv@sv>{U#TMRl2>Z*6=mZ>=+fr zDfY^Jyewn!1@d0kUk1!^snKPl=5E)mul?IG>(8eM9MuYo_NRpx#hy2?Rplmfl*$hj z*A0Y74zbVrweS%?M?)TYdBK(zpTo)WH?`}ItQ(?Sm$!)OCX!zk_v1ZOb#|MLjJ0jV zEp~>;?k=#ah7Hl**&lP{jbGNaK5LZRIdnVwLXZpf@}f`>nG?KxOEOywce z(&g4+*>iiqO&XMHS1(xyeL->r{`d-1{SM)e|>= z;DVxngDqha;)idS#5TdM*)ur&4i@eZfkSjF(s1()v+OS3J1haiFLIo5vO47VZC+3} zPF14qr9j+B$Vk|swBK1nSpJN7#;l0gb_no{qQ7U^;&E1f&G6WB4}(_UKTwH<@r=Jl z!j5_Wj?w_3)-j~vnc`$axWHOrq{)Kc2&y(>S<)=Ka0?jz(5)!X-Yhv+WMasU%^_%VFjE?2;vVgly*y?dCz zyt*?i<%awn%dfZG9nY^HxIwP7q|QCq`qNS)i=>>*A%x~_WUhh9MmOPXow4uF zffq<|8tjL3uh=hhlD1|ASM!96{FCgHDOL@$*%+Q6kx*A-ulfwjj*M5zSWG^Bmwkph z86|M0hj#D9Xsu0#bDvmiVcH^dG4)>HCcPQ zPX7_2lB_%DZfk)|!t}4R*+{gO9d|7b$y?Y{WeLAIrS)5iA;WTqEhq$T$J<;R0xLtY zQlfl)`C?mRLTeZ8m}+*;NQ4#ReV$>E{AJ(X6B9M8>wcScORM~+oq`X|4eYYdy9YhZ z$H77d9|@)Xy^z=Qu9}mzw%|q<*s}b%vYTTez_Cx&UA@!sS3}&TMKq=H@ggwL!Li|%+ibV-IEbHQG~-hJ=n^F=O^ zlImNq4;RbVlxE$7_6m8fs4A&W>F9;lF7xt2MZF`I;!Kr}7&zpMgpZ3(_58gVXq&YcJq;OD37Xh7e{cJ(73dYV4W{IYsj5d+0v))7kX4`S{bHI*+4r=uKq1m z#vo7{lWKsX*YIi!OU^tn(Igp@Vu4X@X6O38AMLNEr1X({(Ac%LE8S=42(slHmZxsm zBwb)Ol)FJ8q?xl#@2IhjC&=`DYc)4RU8LTQUJ$L2p=uy_E`;c*+8|S$v_)K)Ml)GfC=oLhX+@*x{5;hNFH*4x z4Tg#Nj7RWNFKlCt1%D8gXHADf3SBY|eZ}5AU(w+p^C${JpWXvjpHS9)&qeZ;GPR 
[GIT binary patch: base85-encoded binary payload omitted]
z*Nl5eew<5bP@{wh5>Aa#_b)QCq$^L|xjx1e%Py_vjLK2*&3r5G5CuGNKj21lvJ_q- z3_t9*Ja%DIT6T0V$h8EVZ;$12HqGcvdCxttL-$g8A)Ls0U4k@+80H)337y zdF#Jfk{?H3zCF&SEI*F_N+Nx~@7qMpe&uU_e)@V#yAjy$`ZrjP>+cpEas3xK6@SfW z{3mMr|6sX)&`dx7Z+q@vqjLU%@Hqctg!eyE|DPWT*tt0VeHOdUup?Cq`h44k(Bneh zEL|^59SizyL~6+H)~{;|nN4T<`Y6-ouCn$F6W>trXCpy17AU2pfn;emHBMOf2-(gt zZ}{7=jNyTALfEqzFMz+8ru&Zxb#WCu6p8{p}lFIzxCw!){`rNX|eOC)@hTK45G2OsggL}vBGj2C7aCdHF zRYSeir1FFCESP1xrGv{wtYM9#fzOK+Vw&)vX%^Cec-_K&2xh1f(ZIRyV|n8EJ`&G{ z%4twy&wJ+Dcb>W2ra{NIYVqc9u4md13!O++%!5?6ktymkvk)TidhJWq_L%m>gA-9g z#5~LRx}WPw;!VZA;FV%bP)w+0T=36Y(&_Q1$IMe3{mUj< z?tsfAxG^Y8^^2qt1xPnII1~I9(Tt7j$R+EUru{Ob$r~9x3P_95OwEzoWL|k^_PMri zF-}MN5e?ck$r5FsYX=Uukm|VTe_SO~&f_eG|pcTC9@2 zIU_k_9CI;Hx%FWBM)({@P4WG`F4@=?9Am7()C*{?FKu!7E?Uy|p zmW`xp6mKgLF8gRXL2R9RH2Oc=_QSzMjy&4GC~;u@C5qN`b7-1}{KU8tBVJXzs!C;x z^Rz3cn>m)oRK;kTJi@opGG*-raLDMkC~z)?SmM8Pnl=VR<=Q8tzQYCK<6dh z)HlW<2yrl;e4Z1aVvG_kQh;xkO=+jX?JmnXcJ=O~a*RjRxtF&FUh)}ls+Or49~SPe z+zDiyt=Ob1FvHWyNFO$-3zjmM9&@c6H3es*_#VFXr#wI;Y|blzgJ)(o_Reb7I-`WmL)=|K4n|Hm8Q7Sa%kaH4J}ng_fO|hvfQ+R~?X_JFu1S>vvKLXQrEk_#=>%dL7KC5e^Fl zVT)K4@u$i3eH-j#4>8}9VHC8pEacOb$~~#dm*k7XNlOa@%wOS|6V4(k&D8JtX_{xR zyz6xbm8KCoaMVtQC|9DI5)hy5$4BjkOE^@Cj zk>z#%Z4p(~5Ox!%<$tX0+AQG8fjb#CbF#;@UomvBU1za!q0vu`zk^&=x-*#g80E=3 zn#jMLZ%+r}Q~pE@o;%Fo`yN9-LqGl_3 z?1M!Yc}#DvIX)GyYimD@mP1`j0qYMjd}-uUUM3c`G8h81NP35Kn{+Dk@q?I7k`RZ+ z{+W{*cgEWq;(d51w%9OdgjgEE-nMh5%VX77Ljht1+AI=I;dS%czIA0cBH5MXd-v&& z@*~oUH#agPfh?DL8^&i0fe9|!e?W{To-SvwPEshb02 zUpmU?8 zV>2qttAR}x@d<~g@MTjfGp^;F{!@Hij;~z!fHXmq-vckMt81EWt4FIXxQiXDXE*KL zj}YT+AHID9o!> z&TRkIUXztEYDdJ4G3b>2VsOtP%~ET7HBN&D6=z1@;h*3_hA;F*z(l&EIq}C($GuuV!I}-TZNG-?#fF_2ZYG>AxZhyxOu6D8w7?BddOpfv zC3CL_%NzN(v-8a)n31Ike)1v~!Zo7%MMINu+(+z&&Ow6`}s?_NAYYkrSsYTR7q z7lp_(!84EJXPfck^JcGy&$g6Br)o%Do1WCoSw}6Cc>`Cq9zF5D+^#B&k~fbG;x@JO z_^P&Me4o~%nnIkr&H{s5Z+X2Hux27AM#!?52opnIuO1)wjVINsQlyfY&&u1jLXmb2 z#h16_NM} z?!8?&*LxLFESYY$g0(v{S)-ybxyaa!Kczc4Itn}tX6MRuEPW4DGm8S|ug=B<%KVDH zpRp>DOIUaK2qrV%h_ zsPy4c4@1<*)f*iH@3eXa&&i27zv7qvQgFqnF2H4yWt^88BqhWaqg}VhrsM-p3I!ar zLE<%6&oWCryS~H?9Frlo5?_t%N@LGC^Jm7-zC~)1{0WnLn~~9sdWymM)wAYfGR=Ku zaG+5BhcIP5W;~Py>(m`w!R`Vx)PZ@~*~e`%+-}fy#6t z4YKs7qhJ|@beb{{G{RnPlG(q`{7zmHqku9KOoiCVJRDdir@?Z9BJI}Kh%l^k>FGk*vc=s}8F4;29w~a#;6IF(7_zCxA_>}2UugQ;;H*x^=Cq{g z>QgHs8RN>o3i`HRWD5KJaGOVg#hBen2`!u?$_7tQJ@kutR zN4xh*`ibn_9FQQ~i*%DRrt`{%pHD$|sh-Cxa6Li+E2Rzfn{M%je!MX@!fjRcv%{t( zyno{J=j^cA2!oNLVQxF^{ZHm>NIo1|fWCN1KJrFj@i2~A*tVD}Q+q4OZ2puH0(S|? 
z|7kY!aA`u$4ys7A`ki%iCS)hOef__crpdlp5bY9trXH&K>g1XAY*E~>76Rt z?^^+cJ7YaGbgW3R^D?(kzx`Q$1jDpjhL~=$B_YxpRVc5d zM`0hBZL*H4Rbg<~!83}RLkKLNo-A7~D~S(6B_IA;DOXQ-44ilW!&>4o7{u4(njghh zR{skaa}YX_rqr%>g~l#dB@39buZ>d-Pb;*{7hg6GjMGP?V*a9rh%T7^(jHnb6QqoT z&OUNi$-8)Wb=Rk;CoUY^~&Vj`rm#LK>yEMb&_=rIqzbLxi9^5{FX=RQ`vP zzK-BP%4t;H!L-!z2D*lSv0mIt04be97_FQ}I{_(XOM*!bH~CBv#_eB-AIMeO0i?Kr9|G$3GB0$ATCL(zc))XR2a zGOg5utLHT3O4KH(WRHqduO3q0nI)IZjw^pWrRk0Ua=Y_YsvzL+PFk~TtloGL37eg$ z$uS5=*K$;V+>l36%%R(8Sz%#saU& zbM$2h4hRomrqerC54EcoXo_~EWoOJkp&)bAiw3TC-~Aq5#9eJa;6He}?Nlyd?04rX zI;oL|2#F5{VCxmz&&oL9QW>=5S%@H$wx5mKhFth2CWTC@QejL=B#uSVX&7 zYcjvVzv1&r3XU9(pOE)J&Bcay=f@=>+uCvWO>r2pn{j&A`^$c#srJ)>wkD93DnST- zkIK?PK8^fGymru6W^uJ(2en)n1z8md{RFJKIjR#;P7!x&;Y@bG@lR}+!_t{N>#JbV zSumLi216Z2(Q!rVoOL49S%z}x?+d2On#OVjF?OT?l-HQEjZa02_m>` zjIv8D5mhKd;c^2wk-fN=QKPM%1CB1nNk4V!Rc+%%<3xDJ3 zCX?+(JKn`1vs3k=RD;i$Z$xmMu_sGVgxf2$43u9sj*HhvH2t4fRyGYPEzUGxky@M8 zW!N*WD>c&1S6No+#SY4GV{iOqbqCrE4B;bGxlYf!tpHA4-@mI(b053A%5&5C(WhjC z#j(A;e|l~zNy$lXfS_HbFISGA+Bv>BgxNA|YQPoQc^PCziP{YgR&@UiEwP;5M4Nhe zwZ#)qNl^_zw_@x1pU<^D!o>G@MXP-sL$J@oowI|Vw4cu#s^bb2-0{hDt8fv79ZZJmP8HUEq=oYlXOX<>~EllZ8kPrf+4Ll1Wj;tg3B+C%QXw1M*r*kOi(DTbP)PJ8I0U1%R%R;uWhNZ$t zx6jG|pg3eX-Kj@I2>>u)o9{975n5_xR8dKhd?+*YiTQtj>yx*&3S^&eUTkDFZ~FN; zyzDQ|wrrx}*0_U(Vo@~22aKC$_^6M?tys<}R3CDZyb{th7j z9jB7(zr?Bh-+2)KF+l!HlK&eZ^RWHf5QwY{1v{c4^iZD-FU7V~WteWg@QCD0 zu;kw0?>w-E8hha@!4zrx?8zTeC{JBw*=g(5=SRmR7@^Tc4`*)Wp4rE-N6Xw9^A&E^ z`>t(4fpsD9U=Y%L!sAs#*-p;vu0-fb`nMU;1=7W+2gBgTT^)6dEa|DK4}mJW5>-f z;XzyYuu)ZIbfVaHCpUHaGGBrnRuNXyneInROxZIl@2v83;PkbFTaJH)N65&w?4>s;nlbCi{O%Z;zfhDH7Dz*cCXDhcwV1BJtR{( z!q)J*^u)LLxEqN!ZZ9hCyPHL1KD13Y3suqkSq<53)6D(Smt;g#2bqF*EGFg<5Z`WZ z28d80jZs_JU%%}^b~kPd-E7OTattq`=ZNV(Ub~lU%biP<1SN?P^e?TEc}E~aUCe>; z%#fjtPUoo1_RSB_1+4Zfki*I*_5&h-T6ELb$4bqZm&%W2d`O%~zlvJj>{A+OEIq7+ zE?(aECC(izj_u%LpdllX)XWc-4M}u%l@{_jQ1Fp`s<#uWFL5|IPi1H&4Cgq=3r|_2 zepiQ>GEJ%)X4>*{a;O5SF6*h_iy5^Q3(>NW**YR*m)KtQS+GVD*UD{_0eM}BweUUWcZ-ajSV-=jzE;BRdu z8Q%S&(_aS`#ztdzJ-YsWaNM%4o^#Dh8txj-EA?djhADfWfBDa74ojQ7I4J0co~G;ZG24cnS# zcdX8W?BQQFwP5L!OeB||w!C{fRxeQc3vwM0cd2rQl$ZJ}@LEdhlFd1-tSW(u;q#@I z^|H&x8u9v3^E%l>-A3q`peOBi+7&CSdjt23#?gLgaoT3wthyqMK0zE6SqCm6?S|Y!vtZ+5wY0 zFQ9B_|2cRLPl0fRF*=exsy1H_rN(?4KkC6dP*VxA98R#;STm8+1ETY1%Hc@V8btu^ zLety)`BZ}L-L80^KnzArDCv-1`eJF>6$q{w6ci6Ln1y+O;F`gAkL>3_;?WEOC3*mc zyEYI?HJS073Z4br!@=*uLRt@ML2H$IoGg5YG9xIM5vMv53)d1pzZku(&d>i>XhnwIp;*eQIY6E1Hk}fG zHn(<61$Nd063$cGK>g8&^YKGFZ?idivP@{~?4?`+5ho`QWG{|$9Dw3e)?tvnI5K^i z!&Ti*Fkt{CF)V<3BU#`;qpVk?1t^}{ru2D1t%Y|EV_GC-&FS@dayPhRRQa_DzNYZx z{^&el!*NfVpwZXd!M6O=Iinb_S!j7ByFzYIrysOvm0ic8BhE|>0QE)JCz|rs@bNy) z?t!`a zBp#e+6q>9&hTqO5T2j{J*d5O4Z7yBGtn_PGi3jPLh`N^<5YiTmOkgN zyB&uQORr__IO^>2PnYUCm&?@0;iysD0yP-BeNifP9Z^4|6`~zx;Mqtx6$a_~$9U?$ zvl)!u%;DPei%yt1nN~9-aFmZw^_DyGerz*!)ouS4b}^rPV+Ft`aC<;}K)Cwp{kmsq z$R(xC{N%qyJkvaevQ(6;J;10XdT5z+d3cs!P*}DNO735ub`lI`P2VnAUXAepqaee%a z)DKi3WO;J-G$0TxKZ#5$qSF;xf=jND=ho^&mRe<(blYbkk}7-`u%Y{x+}1iS=B>{% zgg#pX^05ByN7r0$1BdHxK0SsLnXKDgUrJiXy9`v2IgkdpI_c9nd0sj@)(keOPvT=^ zZvnN3GgsOOFR`n-`N(6H6Cw+IcAvdKi~^ba8^mN3VBdFhRpjXbY|f?~iYz4uNri@1 zI(|L9-_`1RO`uncs-$gz2r-iJG>{^yFjvb!07yJ3Q@oR>co2-YK{m%{ck^SX4u&g# zfj-Aed0>|(_Zx4H{;R&<-91D5b%#$+$Gd#hd>^Fdq_Je;Dv{qe5mN9$b{SoqQIR^u z(&uh0ODi)GzC8BubaQ+V;zWwmuRyZ$AOW! 
zR}sW^HthvnA_A1E0Oe!fstXi17nEZ zKa|JH3JsL@?37xHfzqCE5SZW50okp;K9Njn-jJIZ5XvB#>Q^H5sHmCiqlbasUYv9} zpIRscL_OE!&xNO2^DC=^JK;!!S@>NhO^=`++mDdym&3DnN(T|LzL(|&Lc!6tcm68y zmqyWe-&C&N^45DA+?lyQv{`6x%Cm(z^?kRL9XM})JrGg>je|ROQd2}O7d{uwufe;5 zdM*X#G?Eisv5^(1cGI0oLpb_xxaB?%$9M0wFO6LSl|3)#^8>p>eISHZadYPKv3)f< zmQslNmHe@8Oq`Y(}8|9^n|za;s; zK|UAHzxR)1Y}yfWV+_4iK6rJ&3KFYq)f>Fi1mydh@Vi0Ja6~8ZWBrN3up?UM1u~4v zT*t0$>Z;5zhhcsD{W%Z^;Wi62BvF{iVR$@h;Pli?_}k6EP})i6`FiJrUkqtlV%C(; zW<#5?5A7Q*^I_Cv!#DZ6gO>q61U9|MCP zeK!F7e&?L4#`g|`nMb{virK!j*0H`No*VGzEar+mUelfljp|t@LM&_>R8dHkXvEwT zmKj3=+cesiJ$*jc zh6KKNoF7FkQ^Si_TsU9fQWl=Inm&Nq8)Zh+0WfPPszI0399xwMUQ_-sV9FD|pA^n& zCZx^zT5TcC(xYRft%xCFj`{s^l7Jp#A785em~E9T(i%+GfgP4{ zitRn0+~iE0{3vFVltL!#K#sARQ|ixG1Y0XDyN+5G{SlxaZC*ZM$@uIbYqE}=KoyZf z4PZ9|@OQqG=o+GsZkq{0u(Z6lj6Df^jH&8WvxsQhI-iF$>7DQuy%M{6{OmYM*=n~7 z%gESI*>B;SY?BN4R+f;*bWOyGHCc+e#cNeAVB#A0%}8@MtsL{mz;wYvk)@diOx_qf zRpMFYXJ0xCi7ew%*p*Cn?S54e+^kzpSX)d6XUl1~bbi7GoQQYBignjE7~Jk>&)8d! z?cGM41#_I$?L1>9U@%q*bj~@V5L13=I!-BODvq*h7EUU0CeF`lE~Zkxk~$+3T+vkl z3IGNOH=B+C&2N(o_XvoEh^G(np24Aq-k@jVXlw0WVa?=O*clp!UR+w8XG-NiF6P(! zVCs98gnS<_tfDVj-j;NyowAnKG`7FoQ9r7aVXlQ?yo!kbjOCYn3@C;Xwu0RXbH#of z=KG}u_fcrAe#V`bKQ=;^s0~`X?(@}RJLf&~^DZSVCS10>vrxa=0plM0XK})w#QGJt za;#>ackU3YznQfb{O&mRRi41{eCZ!aS zpV%cmJD?Ol@W~&Z9Z-G)$rdzAnLck0tGvwt_qrroO9~?qzo(~06@i6)c8Pe>c8!~{ zm9pd;wThas0J=o30f}#k>MHvNe~MH629>*vXi(idv^N?n?;C0=gk@=I|IJuH|W`o)>+^(mEl+GwcWMU<}b7 zBeFRz-An|-PAog|$=Zhl9zRF74~`jHuTQ4Z>tbwsec3eIhCDqqbFeLC95jNx>@6N1 z8^Z~#4008XzDg*#m>ap;TY+_~tmPhO4vPIyfI2%0rvh{`0)X^#>9qd079n4{gyg}5 ziI%j(5a;0!uJNqLqZ#uhMF5Wj9lxV-SXj!*%*mksLWftHEzZyEYd@lai6%ngcN%7U$;E@ZcO+3{R3>+RFg7}9G0I%DfR%sO-@yiK< z9{VT*)Q9nk4Yop=V&(0KW$m4KGz&U*Wm*yQIkn;bQ?j(5)lxL-ha5PLnZLE7`l$dr zW&ohP_Kcz-&pvWSwrw!lDq6%hE^T=&Z(GtuI6-6DSVEEo9Bnl%Y&Ed0oc2xKUAonFf7Gpc_KerEwca|HZOB?#TKbOfXRBjutyCpa zRL2|%7G||X=p1}!K)P=+nzzyO4s-hY~>Tssxrx){7?u~7l>C$1#Iy#lo_~F zD2{sF3 z@#K>G_hnJoi>N}PG(r9p?Le?M%`#$~FPYpPbBjfM3Wy9H-XoGy+uBkLsSadVD`5do`yeUmTskz5tudSs*>bdDh2t0!o$P?3Q$UL1*DQIrm_sB&m0YQ zvs%BE`O)S5U985j%a3Z}D>K(gvO$t0Hu|xoiUN9GX&#zb7MPf_sw7-P5FB_1ftXs> ztk&gp+d_qfnF>sqJUdm=nfuXg<#fK?aJo>Z#UIj~hPZ{iPydufWAY1zJb zf4S{1`UwOATqINTL-XNXAb2Ddl4!?JAfQjJB$QngO~~7g)}nMO%IOl=Wz2x&W!6K{ zW{2PYMCJ3nzL3xoPnxK#y|=Xyx@}mjI3_^Ybo!#faA4V#V&+D3lLqIx#&K-wBA$*J|8n@VUpnj2ac_z3*4g*Zjex;!X)`N@~rdt zgTWeYi?;|@u0IX(P4y=|SWHP)YEe@pvFOj$*>fc$3qtepTp+Nd6_UP=p{O98T4AaF zCGikYIT9TJ1f}d$s>zM2r<&$NzxY*tWXaGGz9Sd0Gsm_Gd?0Umy+32T(|tJlME@%4 zy{WII<<-l~>D^CB24qpU`iJ@>x<%T9D#t#? 
z6V;@f7v1Zq-|UiG`c8%>FT&22cOD<+JwNidV}&OS^$fCO))m-Dv$9#SseJu}05CaQhAQ{Uj@&F|{mX*^U zJ2%+1!A9}Vw>nRV8S}sz7=t;J`sHKFW(7Gv3V?#{&M=l{gzpkk~u6Y?_}LXF$% zdF#_{4$XD7q%4k20T^>@{OkIEyHgu}Rr&oa~`GMH5Rk-DAq zfpa|F+>bdf|U`8P0hLQKX4nFKz2c4Tj~OJ$9;R|a94KRNa?vM6ey#;PXl;*)&o z+vTs(GaOv|kyY3B&j(6DMb!s+gY(0G zuAXSaJ-e+`ETNYJdQoncU>Sv^D5!FU}Fvk}N?y(+EmE^eI(yE8n3(>5(#37_AtP zI!3PE)E)rpr<>yLimI7-wRrwK>D)alj(%yonkjtB61j~oy!t*^^vBcPP)|Un-QZ2WNB%>f(xvfI7m#PJ|PAH|s^B~H*#xXnYE1!e)D1WGN7&iQBc6KEc; zA$}H21l1@&k;ws&jKo|9GlJ3`QKQ2RzTZDyNl^@Syza1h(J1;$O#RVFN&BMVnYGkE zG$7JdRtY>B3fmGkH_#PA%{nU=cPMh8sW6vu1k^PX`lFtNXHlV0`DMoV&oLkAd-_1n zc@MFPLm*I3ak&3hv1I@R4Yy)Vz2BE6qWu<*lbPgWCU`h z@$U#$Et^dah8o(MgPr$lE8aWe8@$9`&UoYq1D`rSkJFEaR66~c_XVj6s%l#*i|S2wmzA7>vt zyU>|f26VJlBUTa)0D>5T8Tg+G92DY-#1imJ00{?|K!e6VbKRV3{$rV8(4)gPnvQ{sl`1-Bt*isYZ=y8w!xP0KzHO#%aX}uH1MF$}=#V zDaG9AD$H97;Y(2vGMC&*EtG*tn8P%?Yh3oCiR8ny7hn5b+sF*&vXeAxPaEzkbAXPR;k(v~<^> zb)i2xtG0Jsyc*oRCVRnt|MLB*F>b3&b%s1~`i*03dWsqumiP!NSB?OkZv;gQ_LS;R zE1xLKwFz0HjJyg}3?R~!6~L*rk|8yukYUMbRf-wZjeIiLnocJsWUcAOx!F$Fs}!u0 zpdLh#$U!7lh$%v6DG^M^AC6X=fVOj->6J(O&2nugNgKbCeKbyeVRt_gl3x7$uEk-i zK8wlZ+NS|^cYVDaftxm)exN~hTb>?(weizdGBRmfF0NA#op9F_{PnSYKpEQB%4Uj*S>4X@^$Ulc=g^O5q>> zAR9v#my=Jez-A-W{N7r30YQKwJOjT~QZZR9!ylegEtK)~ z1geWixN1*EMkcbK4G@n+Qzj+@{vql#4;Vz#| zBx4-XWMq%u<9*KqDf8#qAFol~`wnylHhz4NyF%~4(7q^V2zTn}a9lIkD+pH%1HCLK z7QI(1b?=Csq{JX^zMr5eI1=N$5s7LlKsWg}((*~w;8h4}7u`!C0NH;D=WiukGOL>&^9kMTTTYC#v6j z+vBDfaZYnP+!@+~Njmep9e2;`^%)0-1l6t*YsBh4q6@*k-{T9aAHQ(;hjyp0 zSHmM>^!Bp*-vC4IzoYqa|F>v<{~+|S{m0b3|B>YXF1>Pcvi{qKNKol@^*8#^h4!Rm z&Ye5#t;M;cpM5|dc-`cc{&Q8-4uR^8CbDu_T&!O!v+H z*q=V?l!8nm<0Sv)q{9c2h>}*)U=#y$ie_iD)ADwX`3-yod{NVjHJT`vCHH%=v zb+vMbDtpT@$&W_Tc4?6b^FP_HhS@h#(X~lVgNqpi$k43==-O+QsbBOn+jH}_#IwMH zMEml?mqjT8h+#oxpP496A^y4|ganC*LuzGq4*+l2`)6ApUdQbzqmz{KmBINlz*yMf z`RwKOZLpChLrz|8IbhfG-7!wWv`&@gSOljdH?J1-x2~dnU&EF~PyLC*h}CKT4D9`? zifUx14^pDR#~5*>3K78Jz_hGOQjV4QWSkI(dRe@?nDf)l54?t@-FWG~FD8tYeUhCn zg=Py026=$&7zz)GAX#(_g&ppc2d0cK#UT7CQ8GnH0)sj`MPhi5dLZ*?L1W=Z4b6nk z>3eh4t677K0)}JT>VtrggFFhUpl@?7kVr^Gd@G0_+S@<054H*NQG_3cv>pT!rCv1C zb$jtZm5Dr1Wg>CkBF%ga{mbg3A)yjE*OYA zCZvC7N4VpH8vb;#_WW1fi2OlDKLxjPPrQhaizA`By|xJYj124@9M zJKpY};PLby`g&Sgs2Be%G0L4S73`Krnh{A<8KD+C1i8HX+shI8fO3d3GCAN+WGf+7y7*vi@N^Y>a zMWVki)UR;B=2T?asW|_Cn0u?>I@)AQRE%59%q)v6w3wMCiena! 
z&>2dDv7+mIQVVqE>g`P25?Hkdr}>|u<{c-XXk9W%Cc5jQQY54#q7|M?#ms~si&Np^ ziMhvCx{8&t9$T*RJ=jNN6tgeb*@gLfeIUKFGdWC=7GIRE(;Es3lYBB=gMp!L-neHX1Mv5ZyFr?SR;Shc5-Z$hw-?r zD_E{PG|w0jBj%jRB!qFpj7gHvv*L$uSut;@;RSe@{UOG|?&{YVi_V_WMN`;BP>)hm zL^MACr`l|`G*0_)8Hy6h7MHj#sK2XskY+sU6_jw9b#02wHx_L?Q~!APY+bgrNHo?b z#Ys^oi2ZKylN7+5)D)4Oy2W8dFaCTcZ073b%-o}&uMfHS9HOgK9Lbe&>XXuf`J}Y+ z=u0+J9WwXqu1&}5+QQ}z2TEFG^PQt5<+RfTTEw$OYhH zN=+gP!T#i={`)sP{_M)tSCtB6p$$?I!-*ev)EDLHT{w*$0>T$#d;Ab$-{|PLI%8{R zq~7HD^tIY9%vzDmY{|`Y|4cA;f&;go1kbiV!3f@Gf)V9W`>%mAEBn$Zj{S02=nP?B zZ5of?El64eWl2y@8F3rsg|0FCG1qV<=)&%qY3B=Jq zGa)<|w-X(96>yw5NinprK9fBG@Cab%hu!IxGK}o;NgBlPex;2VCGo%>oo(nOCh-^X z0gDmr-wRIz#SOp#CsQRc?{_{otiYp`te=*Q*LI9|-2OB1OE89ex(!#ys3N1Jmq=1Q zYgn6?Z%3rz&}|Fz{iPev_VttqdDdGtX=yp<T!y|nqPLU_KM|6HyC$#W~??4fk|gcBz9_<8qb8>W|| zloJwRjf|{{M%@|A4H22NFQ+|_%euzN-C-X$MSsg4;xVV5RHX9vn5|7Uaz%F1A2EJV2z(ci*WO0w?xpLrdEu4ly}86lbqJ0pc-MBcqKafn$@ zwzfyG_bTkDRiWa0(TKhsb*3sMLO<~`W%|7-2_i0>;UcU6J}doTDnT&zsspmJ3q#lJ z0oH4o4<2O%>Wp;0@ol`Lf(a|~U9Mrm{jqx1k_7F7JjE0~>Fz#rTzhxi;L>h;QFIE4@K&ggi(J^mZ z0OoJQEn#+4W9+cc{MgCWRgIY6OR|~!i#A$T!O2>iN5+_329R0N`%2M>InC7rnYksH zY7bzFKU1Z{ljZOei?qE$^Pi{4_(?ZCiXa|_vRsiE?3W!(q(7+xU=*(EH(X6aL4wD% z0ucnw0*()+t9IAz8*eXv<2?0jA8Pbo8XmWNJ}xFsGJHP^UXNW4;!fxDuaWy_d8`Ug zpX(3Zs`n<;u-`VB1FhxvYt6pmg|xiyGioi}Z{FJ0V&o%BrkE=i*F(QFZ>NZQo(G&s z-)&r;Kdn;qK-Xq-{tDav)o>lh|ApbY|8S`Iv?F8xTL?;we_N0K50kRLSy=pQ&SZZN z^}h$d>|C7x75r9d{>Q?Ws}3^)0{!``>lk8~UGfKOf0^Ni@3!5uEH+MRm;{lI@jG$P zeOj_o4sLOxR{pZFy)0`r89Ft(U6$vUhOXYjCx+dvX^@!oNKS};Zv1dnY4w>-%x-~S z5%?u}^)_ds6Zfu$_W8+-3KJ(QkQ4iie6^O;hG?U|rMrE6=&GEJ{La@%9%B@%4KiDn zJR=|W)nhxVm+eb0w=I8o<%~@ZkUYv7;LKt4^>ITT$z(7Uf@!4*-aV1KY(Ib<84P)0p=8nCiM`Vn#;3JH3l^s4S|*dwAp-W!7(^Xxa+&jcDi zum4k!o?+e4hc`OE(Lf3JDM&Fg6#EvPn>8>z3!!=EP&Wm1GXYcl~^9|oDR7(|I_3x%nT%3$p2+>bB)%rwP|lzEPSc~^|;fxe`# z_-f5=Wq~)LgT1ZtP|R`MI3?%G!?&U2Sk|uaST-nO^3@tx{Pzpq!)!T12nwWt8Jom< z-!x`|Y5weN?MzCGr?VK*M+YV2NkXu^E?jxBK7uGO%YjYz2_WqB-5NDEvydiKsT4KO zzW2srMSfL=@f0K|D#sWsQj{=p*X&QR9=c`7JkogRV<6M8VK8Y@pJxKxTCp`@g-M$v z)@tHeiFoYiyo{0iQP8g|pn^t?yQbYOLhtP^>4+_F7eT zXo;3T*duzsD)1%V*dlLUI47LnN76R%Fw=>c~S-LV3>M(_6_};_^OMzLy~BAZX9WYaI^bfOivWs_enA8>Y}V8QgUba(ywZ3Rl1Rob z7;#sEZ@;pWA%niVWQQ=_F(D-JmLXp2IP1={Hg>BzQ%`?i;Shw_LZ1`rB|{U9Ocss8 zJr6jn>^zwoeun`HgEJ6G+ezp7UpUb;-cPG{J3UH5WRizgLSzRX#W4VnQMo+WbPNeoV=8HIvQf%7B7Eb6O zVbcca(NoRMZ~7O4CzD=hY_?4E>x*xiAYOkq5%!vmAq<_9Y+NW(Q{|m8#oA(9Jq~u% z(#;nO%{EP*IbZ}xp*`Yw~Z4`q^9ux+HnD^4pUh`}*wA>a`xwPinD z7-sma2RGv1h*y%GeZ@}DMoY9%5Lz%%K-vI?gmh1-bESUi-#mnEF|;6bfu0}05@Xis z-50O-S zfjSTTx{m)nU)!ux&4UQ3yYYO^@2ANlqhWXmU$k2okGU}=Y=)jVI&eSEU>gbnHVhd$ zG6i;Rg*HJlQp|y_^+3?@Za)sSU;B{Vc)++|X&Ez-P_SifAc5uaC3CqA+xegk#4ohh zZh|A>IheBW6!Kxw1zP&97%ezpdm^oOY5TNE)}?8R_5&%Bn~=W5nNL49hs~BsG*~ow zeQ3SBcZJNq>*n~l1)STTY_7uTpo@@48%|uAys4O@BK;~_@Iw@4K8De+tE-0=ububw z8zVMkcbOIESDLY{aR!7kC6IMgdS?(A=85&p3NCLwqY)<@?KFUURcax%M@=zOTWfIW zf&|WuC(5Z#lg#Crj*GPDrdSYsC<jE|KITisx_K{t^{17xYM1hr-AId@$!&wd%9GON+OLnI z(;>`L%nFW1+#z$>O)wf`L{=L32fN&T>tu&n+^hG!hwwQJs{*|6#mskP{DO=h=ODvy zbg6EFtDr&;*uBYqACu2I+y zNQ5Y_gU+(A95`O_n+nW?5mbkjxBW&XbLtCsNBN4sB^r4z-(SC*z^*Ay`E2`RGc;S88md|MUp=VzqAZ(-Pl z^(*Pg!jde78#Cue3TUhf8jt*s>f6ielM|BtnfW}KG+bM!SIlI6_$SdxV`K5*Jf-NM zwNmbFD`nuZFt+1nf2`qB7tO<7{z_!yas0~GNEVei>B#V!GRj$+$cHnrpQ4%DF*#DO z*tLyw?c+iw_ekCdH!OS-oU#BSzOnSUDPRWsji;=K=e*)L)iHq364Ej{=UaoU(XhBc z+X|THytjTez6Ulmu(}M2QXR^6TKFKgz-(?HZw(o#jO__U0*L`d6EJ)PFLjFO$+-P1 z#H(2K#{0zWDn0#M!%}Wk^??N6G5bA&+0p%3%i-^nD73;ynjk*ohvc6r?P_ge08?F) zJ$FiO{Y!nfA{D{``Y72n7Z)xh%5|pnjPGARx}=1VER9WvFO>-fzY^}I*1$H;GrtbzF|JR@V#+=3TA)?T=erUCIc+6ZRm%zyZ3yHSd!2Dm%{hJWHDw_I^0; 
z6z*>oGVP|>X^X{!%A4Ft8;Hau{GN)N+a(W%g@S+!==5A`$nfogL=%>e1+2w!r)m8p ze19smsK7gAIH}Pi(zC8vzeYOx#dUM9?8fBMx$1MZeYB6+V}}S z`WRpq?ZG=^nq!Xm{D%RYI8keqrWT!j zZ9{1wFdE~Qsylje0&<4x(`E3PQH0GpcNd;(OtaJhsuKFCh`$o#P9KjvUG((C3XmgF z;Ch7+^%EA-UC>JVbZ~8OPgmaJdxs0BMqGvUtW${CH+0x21NfwUo&kw`Zmr7>$wrty z@g-YEieIDvd-t2qqdFm=_Kf~3EYxgq%Z^LmudG$p9OYUY$2-d`dE*=N?{m-CF8XHZ zG-ANag@yu2zH2J1*Qhg7n(#erM&4^^53xuUx)a865qajT*XBrB$Ea)Qt7}m<*oylW zVL`8&X01_~Q~6V*N^S@|rc5~svF*IIofj5$>$ySoHvVZ@N0z{>O7w>;e(&~=LcBU|iOK5dKl@I^f4bn17hVJw?4Gx=?SkDski0-q znY_KlRF<2*dpdtC;c2e^`i?e?y~nJsfr;&!3od_R5GYUw7p8#=iF3t}nHS1I|LSP) z7EBa)tAv}Nh;|u>M)F1jTfEU)Jr7f0J$gJb_p_v%RCpY04;MZpEci?9x@STm3T1z= zu*%me4aOKB607+uD$Ff;nIj=5Lwni zl8%t8#$8~4Gy);R!s2d=S57YB)}%NpR{j-|u+S3sb`W1ZIQ+v&T;&ryMlL z4TUtvMhh6ff1S`AI~ArvZ*ETgiqk(kCh5&q0=O|LEEBe+-n9|vXSmp&toguM$ zbn$gyesy|WV}mZ#N$R;`&zTrm^Y-hhj`ZxRs;<7hh~dDQQhRz-evoh0tI3i^wL;P= z<8HC<;k+H9>~I}fW)PQ^-|$O2<`;VHz25He>WMjw`T3BN@st0H+MlhzN}qoRGY=@*kotEvw87ccXOmHF|^Wud|6hjxV5;cXsG&#BXcr{ zFiMt6+RkkQOx8pWrGN=C1xgH1OlWsf3zslNU9hxnS_eg`^~4y#=Y=nceTy^fL9C3L zVreaBhe>u7u`B}0)w$Z}SYzfn4S8n`FjN=)qtOonNBC7mgySLPTuPv+_MCNPVBAXUwT{(|$5Iqjq+kQSj zFJIU84#K5@v5W{65EdlIc6oNwx=OF5#T+t?*>n(wEl?Uocc`;WnQIH2Y3X=9H6p>1 zLL(z@Aswo!_RhR6j;6t zB%(HxDH`2X&-7#}LgIS8uBbXwoOhd6W23nD*UMSix>{vvV-{R$;Gpv`np zA&|sKX?4+lFw9Z4I;hkfc&9fyN8a2%|TQXn|sIPzmokEA?XoPtgi5Q6 zhd1OZpm%Z*y#dfvyydL>Mh^1YcUP^5qJ5B}qQg}Fj-5w(#pXR<2ID_Q@Fsx`FFO6UBPHlAd6 zUe^9itIneCr=?`2GgLbtRHZP;aJC`3U3gRU#$t|L*y|tTav6OFg##c;)>4KpPiT<` zAu8}hu9GvDg3XM^lr&%-ERCFI=TzY%umYWpw9LyQu&$lB*D2nlS_j}9*D&<8Fkc`_ zVJdSf7qPQ%Piy;vWf%q{+NJn0rz<)o7mt_)G>#A9hCWz4H>1K8I+oQ zvs@I|hXPVB9Nw1p8DhD&1d z8Ik6GokNBEv%wJKCk$PJTf?6=Up()EU@1(lDv?fJx|Oa+TR|BRQh4#$rW%-&L^|a3 zv$4*-m^8bKq$K*7%uro6kBZ^SiE!XkFk;i zWN9H&6B@ROvUArP`e6u2vZY_=oz$#3DcK$qqBl8nSWde2xQ2`Qsqu|sy|!9YiX&yI z?n%R3MnNwZNTxd!>Uo?eS6x$)8L!fPi0yD(l55F?bLI=cg%WX1Herlw$I92~+mFmG<6Vt7W3o2R-qNaJmHd z6~hde_V;b@5$-j9Q44RqiBZX_OEe3=kZi-aFgIoela`jSgW`QTKD;>WCC8=+4T71N zCG_>1PXa9Yz6K%Ccsj6<6vWU~x>3|Yxyu&jk}2vHTxjZ1^~S?Wpx?iZFRSNs$%LR= z`rY&7hyEHi5ettlKgf{fDAX&llB`hv;@?4gI7JHoXM5am5#`udq9FCcMS!F6ETH6Apeo9z?G;;8qHkn z&8dS(D1Y*taK;}rGVpM%qw&S&(r>r&+~fB(NKITg_B1TQna-`2R>y?l0X^ZaOKJuj zu&_Vy(jg~C?4+dyaV)HN^G}6NHCC7fj#0QnL>-?g%aD4Ok)vGr@4Xa9uhayi5SWC; zpK0y(Q}n&1&}MHLIyN@JijIYCiojrC(uBb10*o$QVN;&xO#aq8ACDkP4f`A^P5yZ4 z2c4$3XG$kS@|?NH?_)>S^(rKYA1dKWpfw8n_P|yta#SW2QrfiblEf6i>mEuJ7c2uZ z>a{XHJkr%djQ9wF7-v2%nxN*so*E)+1D;#Kr|rs<6_3X`{R;DnqH!>(OqkGb$0NrY zQnlcInn1N)ev&@Nwe=2ut2(=y)xnnkIDq+NX}dw#;zB-(@>zMr|<)2NXC`< zd7A-lWqZc8Zi?ewu>&jrmovEpjF`5?uuJKlcxct_M7T#LSt&0tASsmtAQX`tFvt$! 
zhJdUThy06S=62tyuAQG|NOnRH!YbgdQN_QtIjL(#{(?Fvad6O?Iyv`%%yxW@e*V3c zMRr$NMgw{umW3~$tL?sJ!&_SlW)L`6=+qz)dy7(2;doTMlJb0fV4*sVj3V|ZsY+S; z68`Ag5k6Q;H^9U)Xuc4j5(tb*^`<$u=A*O0ph2(3o}2CN7cKsxLKXfTPM6{1j8y;A z_R>LdQ$>SpcOkT&Jp3h`+>M#K6No*hXN<2(GKx(`frPB)@kTMDCYe!yPS%HTSI0MU z+?a&RUIsj67u$a}H7g2x>Y`Sj7Q!t|i`x<<`=!#L{0rIc`YQ6LbFqOC38XBx%$F)T zo~578-lH2fT{O18iGENILneX zQO_@xyKVW!qy%?{ms^#HWvpVR3*KuF>&O+Os&p4Uh=PMeHd~)=S%MDXW_sBjY-8*d0OU+wdHWD_- zSUfxn7aa^JotpW?hpO*FhH%7w!jCXt9*&(+At3f+3ttx{(Hs>)aAv;9;AjZ_5$^JD zpRs35iUop=w$c*B$R3+m*KbFF;f7k9x4g#6jEaI9TUV@K-sv4%J(1H$L?kOD6Wh2| zA<9ZHfELPn!CC;R5f>_0-lzr99Hl|pXfADXG`sble>zv$t`kzS4(kMoqOr(=v_0e) zcXOdOBeN8H%)M7BHBR)^JP`7D{IgAgzt-I@F|V>E>ha9Bka6H@4Y z${t5VjL0DkP8W28Qbt39Z85o=%6zZw3D`%*x#beYEhkkZIaq&mPZGB_WD;~DoqDG< zfP1hNhcX$!q|4zj#KK~lHxf0#L{Q%dqffb%_^|xe`czaN#htM0c zJZEIFA-(kY;2_ONV&%s8ewv}$jpYQX6<`JVfsQiC{Z*~v{42>S=f6p^`i}$e|KDbc zf3=tY_xS$%3dYIF@?Vd^O&VJAc;e_+4SoVe-5S19g)EbAfglmVvxj-)pe0I$o!AY% z&BGU~R`Y+N51}6?J(W$Tr&pzm0)dw%GaP2qZy7$!ggHk}oZ?~OHc0Ho9ryodIl54cvn3bu;D8_ss__kUGdT&L2v*3P@)S!^Tj0 z^CL1H(%jVa&B?u34POLi39L9=8-Scg!G=F}oxfL&KYyuQAJMF%$Bs+`FLSNLO{a%S zRb7zO`Tn~=k;rPO-fY5>F!b3%^~>w(7cp32(~s4j%bYsde(rr%7X-Q2I7RM7A@>gM z4JaB#(0&sS?wrL76BO+ZYQEP;2wd}id=3@;S(i5YYT;GG=h)%hOf=!FI4GmU1%(Lt znclIvBxu3j`kZXL=E(-FV^5fC5nKUVe`POi%$&OE3Sd8a;25?vMqtR7VOn{r-@6;C zP;wUeKLTzB?IpOWBjP;O^n`Vy+C%YSsu$hP?t|-tHQ-IR>$h8$hDC5-PGC;NU;%aX z44Pkkk$NdD(96*4G#yA#q;wc3DqIN8m7zjqhy(ZnrO4{|?se*9r^Q1ZBL&WiZBRUW zs`EpreqR79U6Ef@T#?t^d+4yDMoG>D<~3=CnZ+)yFko46c3kii1DbI0CtDM-n+A7>XM7U-6RWE2zk^EQs!Y@u4%>X4)Ph z;yvnFSNh?X)|O3ph~rX^CR;6l0uG2ts0EQD+iTUC@`Rb`uCxZoD-l%_anH?=gV9G6 z+Tgajr+(m;Z<`m zt_(G8;$X$%L=BgCCl41t7cYqx0Imu%7k#njV>5HT4bZ|QLNU1WxCi2bUx0RdVU>`-gZ+IC?^0)SsUKFc8oFM7+|8z$2Ylo(F zQMeVV63(T`E_S|nIrgN6Y*z{uF}ASz%0Vgu?u@b7*rbmv3Gbk*C>qU!QKodW49YJf zy28Nv@+g=F^!3FqM11lt7u!f-0HWp;-0jnLjNe`wAzz4Bsy_YZ)5cuyEI9Y&;u%6H zqrsa*vX1(tuZejjnhUtpxub4Jy^$jyXEH7d;(iWz$Nv28NtKsku~)e})m1^X;8~M3 zTz^^V#QL&o<|pCGwmw!&+!Y*<3G%t4=!6S^3>{#xGi_!7w-cN&*npH)>CziSJcSPB z=Z1rs5k6dqIwh1)G|R;#!!UHLijAJIR{4U;60M?h$kZ>4G)U)7!*O>Y&h42o!6*u> z{r(kTzg`8|W_ujQc+$ucdpnP2v}^jMZV7b!!qPR%1%mIFl%kHEY+3N|gtl|4QF$E> z6FSgS{i*&S7=32Szkhb3vN2W{rip_Q6MepSfns6wvRQGO6B!!nTj435!GnkR>!D?1?94rr%&N6MTB(^ul*mMX>qT8OzN+~l<%y4PP@^*wRD4Zi?SAD^* zM$@K z0TuX%hG*lc9gzU$Y&po}gn&GeN}K7=Na}|t71@#gEVwPOFh0{8o-5%UATH1mrSNil z0ldfqQ9jKVm`Uf6aJVVtM*973Ea&B0AqveWS??6mi(J(fjQB&K|NM><$!5ayL|&}X zAZx2Q)wOsJ27F1?!Ef1Lfqh>7NC^i^S`M9KT(eWEOB<`}i3VLB?D^UmI$!76ltl|; z$1UCtan3a4>*s1W3M7%{3&{%rOX1D>O}zFo>VZ6-z=;&UHSUpmhlG)7Qr))lm?cYh z*07-d;J|v?(4&sod^|B>yDxk>E65BfCcY7tG6*ru`^EG~7==JLlO9u*tLECN$-m#- z1~?DHVJn=1S5h>yUx=)*7v~*w_Tr%F%l28R=r3+fS*pUq!d_D@Dqb$~SN%BJ?vY|@ zB9Go%-*l%lMtny`2CzyPp`G2~Yj&V=psR}dhNIPqS-X9u{hkixl}a9iu--j7{b`p6 zeZ0OBT-70*{W&|y@wH&{;CH`&o#^AIQxFCG#R-`6uf&3!|8}w9e=_R-TfND@$M-Lw z3Z{SVO=?Wo;&Gr~Y7)(gs)5^`(+mB+og!vN=9Y{TjUvYkWT?m4F!IvWP%4az^9~F0 zwPOscmv#)r%vux#(;DUK?3{5rD>yscMX1V6*Oj5Ki(HS~&$(>rRVfNU9iui`y7R36 zC{JtRm^!~mIMYjbEB|;ra%?j{$$Iq-?sGH@aDs=Ob2%WUQ$ZB!;RFw6Dm120$8VY`%VJ8VIIGzdd3#kaZ8z?l6N!X(|GeDfB-83QdB#2Cty310eHN3Yr>`UOisB1i>w`}IO zl7<{eq69w~c{ex_oA=rd@UtEWV)kI!#0}4%$4zjwlf(j@=`iAPw(zQ+?I1nyy zg15A=9qN%Hh$C2Pns|W@G~vs3Pg8)EBv%*^m1;Z_lk%zbUvbD7#ejpBMPGPI%4Ws9 zI7zM8_D7*Cg29rKN?`H1d3MG|{_>rqVq#5Z9_|jJL5c>u{cax3%wG%*v^NY39$`#` z5I)AD;>S-c!$cyvM}dmm*VZjnw~HO9H57xz6Tw z$qnMzy+;GdlRM>n+Iq}Yr(kp9*U!Ea*rCcKK;(rooJ7W-^9(0Yk5(M3*Y{LNL@YH% z#l#wEIbnPl)J2@Y`0Pg*`d2x|z85Q@{TKiw&?IwGaQvwA8E;4TR&?ya?l{4n+0H{k zoQTx2RKUT>uW~t90YzXo@cfd|(3}mK)A`a2OY?2}taw>dI&k~619%h**mJuxM0PaOx_~rLcz*UdT_;&v)gQM2zss3VM-<9X 
zJxd#$O~h8o-nea36R%?-QhrGGFEZ@C$q9{NdseT*);=zTNOt7W?KYDWU}O$0*Vi|B zQ-9{m9}}M!$yaWz)t~?EI{Nsw@yg?=kLjT8Ejo^WyrrHElh1|?Ti`H$7_8FzRf)2* zLad%eYbBStIQ{EMKzNbL&W<=Inj)eI?pZb391?|ZobgOBiVMoQ4vCW5CQW$ex548W zGBjv2Lv8C}1z^k|f=R4#^2B0HsgHQ0HNz99TH>$|1Id`wjtpNee_}I~CsX+Wfn|%( zvshx^-j&w;YI)oh;tY#*KZ56YTe&@0+gxc=5BO6X$8s9T3PuzeryxG#vCh_Xm!;iJ#*ZasnbZt?f2!8dJ|zGj>!)(9rlPNoVgq_OBq! z5#x9)b>J(}P+>hGDI9$?ik+#;Dql3IM=YDh?);eOPQ}4*=}cfoXHw~+x8?*`g3(}1 zdS<5HzR7e16kW@{07fc4*_8}oN(ewa3OAGVebeWQ4CZ2|P#sA-IqKw@HC<8IX`t0m zIFhW>T;=%Wp^^rJQYh(Hn)dLH*k_D_-bw6Vyr#nd3d#OTN)XIOyBpV*G}!$@t)2FL}TTvIiN}(B} z4GuTh<^zNa<1`QJp$pVrnv>?|)H?64CNgN9-&Vk*^Pwy2u*wKt7Vc?>6A!`p2V<1q z9uo3;HOiVGGp}Gpk6p&Q*pHgQWH~v;0j1-*B?1nCEI(}hE^kU#${=2|YLsCo3Uo_Q ze+Jbk6VvZ4OO?l0bsZKnFJEghK15?tJVw=Nfok6IK_-5~Fu&Y-O+@H?b?vsD8A$xy zlQ|`886g99@70fE(VZap!+U+k8GIPRRZRQVdHkste(B<~tQ?hFG1eNW&EybeTFY&1 z``jS9{o-d|DVaLgZ+~mcTCpAB*z7+1fn`!bw?)3S+JeTM-CwG2+-EZ!^{X>=X zH)`QO+Uoy%AmCr}AZOD3u*dz29?3J2W#T zO7-TP^l6DuE-zvp2%lhYdp^_Qxv`{1vb6F>o(Tpz@N}vlgXp5u(2f}1o^jKOgdj#S zP|0eLj8lbnT=Bx}xC2%!x?+F5|1g;51rfyyl^7b=GADT6+%5Vj87itQRL6%Qufg;3 zD7q1h;of6Dx4?wQuDE{a7-WDpJD<8J)v~sv==kipBW&OgLtBZf2uMzea@F!9OGwKD zOanf_Lf`LnuX^S@u_T1zzGPl~g>~sE%?sHO81hMa&8W!yox%)=5FZh|;o+jiDAf zy&u3p)G(vUiG~#M>T&5!V4XPCd$Rg>IVELDONAN@tG84p2y^7^2H7p`onTPEErw>r zAv3rI;)F!)u|*+riQ`|+A9uzZmH8~~Gw5-HdC{cNF{jKQuy1#ZUcL$v@MJ=}70pwv z{RnZ%Qjj!-#NR5ko%#xu)Sbwt!RI2xhTi8Sie{w>Gj&Rx8R8k(P>-UnVlfuOMI_J# zOpPhN`*y`oNK&im;Ihhd357Oy*KE|1Fo@sPDNJ6_ng$c#z2@{QVaxVP#|1y-)=rX> zox_Bj3B1YhX@xl+_eGT3PhRiMey(@Fk)cuT1m*Fb9Mu&263q=|=#05jDIE8!75oo= zXUm}GD3g|;LR)QJio>Ng<}xBysRPakNz$#3=ozw3&BwdiLMV=lIy5{L2*kl;q3e4P zJ|DjVlu!ZK5x$9Bzg`Z0rzMs|nZk+kaZA4et$5#t=}h9q>USeB^~saqH^uo7AkxGp z86=6rMm-cA30J{IqL0@mnOeit@RuI25g6&2;1)bat9$la<`%&IiyaMy_2J>_h9qeRQRVG5S6ySX%^jy-{dQ?P3!X_r6xhK*yac zO1Dgt#i?l)MTjV6(BMHxPUf2|+~Bq!nifY;ZZfT9IcC$u5P4Td<@?n(l8HX~Vg4)V zuQ{3eC*pmX#N=7c0U6j=d7&N?$@tZa2^wKQC1Rh^Tq08dL3fh0s^S zQObLfRy2Cy4p8cQ#elPrzV>ppbkfHQA{d{)Y07Wt3t+b>A%uhB!GgWsHDchOfGqP0 z{xtCkNP=Lt3DOV+HtX~B%WX)YRQ94BH)6B^Jpi*{UuleAj&W=~S`V4o1mMPfAZTjy zU!bo-Uqb@%dyv_qp=E?~M@o z;>z9nK0K_fH1vR?-)67;RmuJg+xy;Jk%B5a{_ZVl-mM4FkHYDTA;q@=*a_6`mC03_LIJ~a~`gN!&fbm36!AYKO4_hVbBK`)&s74OW)bSVCB}UG8Fo}|8DgR|1E37; zf*4KKGguKl*4G^^mGe&Qny*%|22ylXMQ+*NJ@M!w2bKl~pMDvbrRTG|tclqgIlwAp zB(vu>4Rtne$%!a*A}J&5nj=Br;RvOtS%Q&@kOI|Ie>t10$S<#N4h^b`6&Y-9=KVIN zA*k!BHDb+>Dkjt$f~fuQkfW#7-b4ePFG}6qZK)<76V5rl#uA#f1Egwx%U?MX4fAgR z5d-rtrfufG)z)*F^hm!C6dom(HwOxy&OOCFgBET&h&!#N=@KQAJ4>w!Y3|6Jjae2@t@gqhE7yYuiYsE5Acb;>-0g$8xs>pP^3*r-I2l>=%4_FLgej{D7a`(EmqX=u(a4>Caa!~XyYni#}q-m zmtJcCZHFPax+i@3AiIahy8V8XY4M|Q7=0p0;xbTTff!MYvIGv}Mk$B^xHljHHJxpn ze>Xd+Y$;6QXNfr zw%A2_o`o64!4{&Ww+j=PMUcsQbldsZjeGVF za~^`BQ>3xJbTheXdxXOMDn5 zY(zauA--3mxDACh%;u`r0G~LOCCj>bb5S%z%n)1^CzMHna_f>Y@PwfG-Mm6pK`UzV z01k`E6gyKX`1EAC+5LkWjf~U)JYHxm@@H6ULm6GqH(QPl9S_|QilT`WZ%d}x23iMp zb1>ooM>b3q-o3%GrCB~N$H$q-Vj3H#of_CYo+)FnzHkKwikKC+wg!6Ffa?lY+%!m5 zaZR*z$GqqQ2Qz~r6;N1!$(y;0&clp?ZJDwTR*p~3jL#ni0USpG*x5*B!2=BIgr6*r zjWZM^2D591Kog1UC~NeUp%TZg8H!vylt$MP^!^&&;26=SN0b7PBifpI1poq7#0B3n zBg8=*57juQ^w^p}wp~l#tJ?9Uyv9*_GPY$&m_w#b?$=wlL-W05KTO-t^_j0 zN&v$*FpmCpdr@)29~UHePFq+~RgN*?s681qf;2O$!gp+$->^j=_$<2daUWK+2M8tD z;eH2v+Pi=fR6(aHhh~cd74zj+82dnwt-mVEQOAbkm|(}$4T~OwpneU9$rH#BmAF71 zMFH}W1@iwcPYB4ll8u*p*pz-lo(b!zO?gHOZO;{86p0<`z6DKr)#=a>5212u1vH`z zBhO%e{W7vKtZRA<_jn9M>vyl;7~WRA#HSP3Fj82PK{%_(g}~adQC*L+hnPw-4nhY| z2a7)ynAkr(hwjB@qX5FArd)2U531q5;X?bnSPYwV$|-Q?Fq)3^u-SNM&~H2^P>t9< zo>U*>_XM6lo%s6LW*iLdwO2-A4i_dQJfAf*;AHFdJzh5;H4Fq<#G*l2m`|?%fR|`p z@iFbzo{`)7-~=Hse{{XI|C-HD`NmJZ`FgfJ11+yRll|eq{)z|4jN<+)-11k7JI;T* 
z;_m;DYWSCYTA4YS{u#HVO*o>7qlao(Wr#M@R!s)F?l2}E!C`ZYVvy*ol+mvOkLq{@ zImF1Qj@fntoIJ%=w|JcjnPJfxevF@7TsggYxpU#*XMd^~iae(O6?@SFl)tb zhT*T9SxWd}n5T5EtoVwISS|4i0Yth)l1%t3&5e#vbU~GK^xIpJ*Y7w7aYj9%y%vEM zzoKl1Usxneiq0_8sd+5D;F7O;rpqM--_#M+UFJ48h z<$*s65p_L-pOCL6KUo}pHjByTDqo@9fW`@ORLE1dwYCkW1zfxuf0C)4-~khC+0zbT znQeuzSYD%%L{p2?*0KI{cMGgb1R8VUfW5v@gsQTfFEGIsnl9`$eqd?tG6yJN)}KY@ z9b+oD?5wEgM;N`<>eABKo`GUeI&?bz1?sNZiJw+kbUwt=HS0K3CN)S5VHoJ44GExv z{Oh`t(rj6c+_o<<07aS}X~I?uQ`MAAW%WVP8P{8J^T%pFJn{^5;GbyXBSmDIj;VfQ z4eI{z5##y}!rdG}PuiR}G3rbUxAIKI7r|U7k|2UK#x)-Pctsvjq(G(tXlf&g68Gez zZ$}!}qne~p8F+Q@A|}Hl++~bacsWqL>U}}U0);tFPB>-B36Q03b_OBBeu3&(c|rA>do0kD4Ry(4zNzSPg>w>dgyjaUBgRJ4BB8s9epBd6 z$kgPjRmD@%N4PAjXtNMi*j(B<9y~>L=(z%-#)<4H0(MCeEbhHK;X5rCzTiv0Q5e4> zrRkr0l9)IK-$_d;ji?9s!nt+Mem3!vIiZf_7g!s-aZ{`}MFAIdd(s~bnt=F5V#jzs z=0ym$`d22()rDE%1CM7yk#du%AwQpU{r;}2Bpi&Epg$mYEHw5j$Z1ZY6VYv(L=#;< z`CGZ7agM8xQlGt`#X^5;8|xag5ssFAiQXNaKl&|NCtSZ?p~g3B)FI*FMRfEs?MKTT zv-KU%bFV$La?_KdaPh+{@xy>(ydMO70h+_9t-}`z`EhMRp>+4};6pVY22_G^wv>nX zIW=j1s1l2yT0nG{k`a>YgpMOiO2I{ndoa`Bqu`;Fj#2jPHovEAgVat8$o0b@kdHN< zz4}~3W`vXdhenw;m^3Lr5Zlx!_z5zp#7c0FpW7xC0(a5;${P--0L1E1#K=s_azZ!< z1VtJ%;U*A$dSH;inR9cGOy`goshT<=A{ydt=_i8moAPtlOWOhpaY2vfoTN!)) zz4j+(#>>{64HWiK!IN$z71-BLS31Aez);7(VNooYZNh9KjD{N_gr>>{Mu~P93a-Vuu1~aHe8@yiw;x4Ix|t6q1W)`OZ-7I)tB%1p?EEt zCBRHRG);%7ZFb$q6pnNY`m=xBgG2r%YEv#A#e|l7C-|Xt(8DB@v0!LPmhr7m{cVjP5Yrk5ehB z1kL7%t)$j`t$FIw^S#mkr1o&QWOL(&Zu%&rvbI~qfgmcfG?Y`?nPMHZ%4Qn?nt?hu zZ-vrsxk3#|RRAdS;C*~LH>`ixhcf6%0kKA~xhQ?DmxVIWY&Kb>gGfZB#4(*YI=r*5 zFOjgtO>@sZHeAQd9amu;7w*bW1^GRUCW?NXQxSNw|FFU$@m6*>>9{(4PYaiufHv!>Uy}*vwR(Y{rQ$IRnIL6*%fg)Zt8zwr)|jrYh9`diT8A) z1y{;Ec4}n%oL3QEZm9tu6OF)YNz*~W?FlLIcxmh%mlBe2=}+12}U;y@i$7E#xUt%);-HPZh^Uc539DiJi{;V7RzJ&N=P{i_|6OI4ALD7Gd z`d`xzSQ-B%7%56kGiF`{?TCFk>Gj*~fCL@qZ`eXHstc7$GPx1WdC1{kmRcDrD+o1Z zuS9DhpIceAopg-1fq=oGsaMbAr^(i51A5NxzCSYzOXEO-3AkT|&#j$5)us!R#l$0b zb2l$~%aOP9iLzJ^%-OglK#~_R zy=aXj+q)Up{%dIoba<-Tl!BnM9gAozlD@Mx0;oKa8S{;=`Yn|&2?9FzS7?o*icG$o zd8|?*Ico1Sq>0HoOFIG?7E)csonPJXGu-!qmN>4ax#`I<=WI)XEK1^1{iBgdF9ufG zS0x@VdxY%yk_`O;@?+63KQ69EyZZC|2B{o!|G*-RaVVBWmWgiHY<6e5Ehm>DeVT58X%Ms3urQ1b*m=nUo}c)vcR9|-m@ zEEaVg^-(ch%>lcpwqpeFyC$L#KO^qPWN+3o55aV?63xS*aU$3fz0nY@lqfxw)j>2s z(cybV=|ktvCoHVZRc%o{kwIv%jx0IS)ZMh1g_}3I;mTH-QhgU}rAxTN4`0zlxrWHk zB)V`4`|=l7p5&bMD^ys+>Se%~V8|u~hI9DmC+j(rSo?E;!+6AyS9k^^WRV(ao*im` zt26ZqmB#I8)aJ82N4nncjq&;ko5^PLl2Q8eZK4JAhrsT2$Yi6Szo&*#uXB*MF`BESAz7Kz=*fV8^6BN{-#&jrh-J#YdLP;`+J<^IE_JIDGXlXVT`&WF)0@e zBQ|sJDOS^iV5|wiOqOMg=g^-B0)DgvZ#UbH;``nGb-&VPffpoDu9!CkW_lS_xh#Jh zX*?C(4j=zd`h%mIX&cDs8b_Hp{QieQ6yr*nAkvIc`w?@@Im%pQ1Wx^MK0uh`>)fL| zs?+MQ;Iv_}J6=fH;A^P~TX%>uyJcfDaVg4a92#;Sc^ERx_O?ZPGd$aiai@Bpn^Xb# zlrNHyJi4Lq@$&6YVBm9>1<)bGKw2ivtdAu7z=wPsOED=$QpwY2mE6|WfXJB|DlBeG zDpjP%<&Y{KM{yJVt-zw*T%F)NJ_sM}EhX>A=h^nHb=j>=<+z!AY=tzjl?Op?T{tP* zs6H`0J*!CV^9;XnqRT>Uo|dooTl-%nL^I@xeG>Cf)~y#8RXTm$+5++YKl`BdYY(J# zU65x!KJP5up8A^=!8qaR`W_>^8(L2>hvAayo2-mEs;&2MrAWU=NmPjxx>?lC_BNsTfZ*5Hu|axpAU#i|Fid$Z^7=Q5cD(OlHV(o^VSWH-jR2?s{?i#cL9b zji@q}BR2McxUfLnFyUaRaVW)3RU#g%#I19jp*g!9Z5B!d%TM7QQ2;sU>NB#qD+#XU=oq<~GXcwnG6fIi zU1LfoZ_^uF>jhq)tgtD_2SoD~Xjaz}=;GsOrLT4ADHDO&LpxkQ`_Jcmk_CO&c$ly- z-HO7jPpNrnkuJhr$k3WgEWhT4ad&F91xblmeW@>^WKrLm%;sYEaZz((ZRGX=4Xs9K zPqLcA+~<~RqH&YWo}$&^%HDlpe*ZFi(r4-F$m|~oV4||n5L}6)8|VxlbE${@ZJfnv zlkv9S@A?@Em%Q=zx#f~i(mhGz}f?T^&J6107Dxi z0Ii6by@TVQeoHF_ur_ftC17Huqm?0GpjC3S1<=YGm@7J3{e$^$x|OIPEsH}B_d)py zYKpRMhXVbBpHhiNtHiyO&2rYXeXg!u<7np7BP#)bWlEXUpJ0N+`}RU4QOGBcD{y?= ze>mUYl3wx?kEaqrcWCnUSV(>E#_fXE#L}r{t7=JB$*N`Rx~RPTqxw3RTdqPl({o$L 
zS~Z4f&XSckI=wy}8Xr-RHKqO0xqxm%dVimFrMer@FyHmp{t?|}3?`;nn!qO#&(t$! zY82QElDNg6xe%(leur>80sQCD4%&@Pjg-RJ*K+OLO?+{0poFqStAc`>F~fq&!J^VA zaeVEA1~NU4m|uL3tRI-ngb_qUC&gaFj_$8`z`;;Pk3)7|6JLm)o%OM|xlkmFX@hnB zaM9{C3eW++_SY1n=n)pb7kQEphwS1Sj&hEqeDlu-pza{+^YzuoRze;Oh7<4E;|{EX zh9fY-a3_D(24c>P4O>E0Sty_2%e@|#-dxzb-U&!sl!gXvpk!hssx78Jk@h*qGs*QY zU~Wjm==~vEN;0B*uXJ#O_L(-J^^t=|3kW&XU$i5iLmM&rz|*aPx99X7A?I~UYy4EZ zUT?d^Q}-a-M8)1n-f)r+hOWYU4Q#>9ru7G}qSm~j37tL|dO#iPm$d)xrumCt`kx3& z(8k&kVD0EY!2UOQCTMSCD`4ZQ`4>oJp=TgqWoOW)l?E7@>Hoj3%IVwx`Q#r6_t#KK zfmQ+FVB=(O2yo!yq80h`t$)4H{R8X$D+2sG_vHVVMdg2#`d=bI20EsH3GT^M|6`6L zf-+gZh3U4q50+~7_<;fjY^k1ryi|*;3GM9XmJ3+ht0JZ#X5M;2jpNYXd(0^Exl4@< z#SEqFSAx&x=yDeRR3J`=H(Gpo zd_+$gCtMeXL6v|6qm!D61?m$X_hei(l0{bJk41^EWt=a_;97c>9`n?tQfvrs1V;@u zi#o&}G`6Dti7(DbgshR?s|yhWt||s)7+IA&1B|gQ0JZev$e>xWS$6dc2bgAk+K)UL z$|16reJ%kAY!@O2lB}kaQ~o34!a25=Auq)rB6tT5-+?@~DI35#+cEWNNq=X_rhsAy z0v=HkV|_dcMwilAab%Y5Q9+-+N(EH%fN_=k^L+H3Jv1mo~uQt;J6v{ z1>%%{u}Icx%gnMbs0+ep0qprDsvWqsyTvV)mLoC$jtwx7WI-=DHn^g7s4jpvedIfDHUptZTOwnnF%io`&!U=+ z!B`@#YfG8})~f?#NWY@|AvSG8dk617DgrZaDd{RjXU0lfV+adhgvmO;`skFRXG_Mx zq^DGk!9buIIWAuw1Q)i5nYw>{L%$?SFSb(xdk3M;_FT)sa*T(3O;iE5XaZ61BW>am z@p360Lgnwq?{qe25F8j*+d9^e%5V1S&)Hfq;_a_E%ZCq7z}?+B!U`qwV5i6OML~!K zsiWV*!|yg2L}QNCypybxa7YTlU2-}vh9poIs+xF(J)wo?krKn2GfYp)P9 zakMHl1+Esy{?OiFnH{unq5V7tRq7}RRPcQ zeYk=B3v$rEHKlOMz>HpS5XI0TqHqunL-MbGrf1_}Q_kWy(O<>o!sI#cmN%QsEu78` z8#)NNu0V?!2dd->tBFTw9*=K+TsmU3-jpv>vfZrjJ&@y1ot4zn>{RD7&9>RnfjS`r z9M1ZB;TDB$Y@^2oZ#^!r1p;pfGMT(m4G*Y~;N4DO#)1ZL+DgsoZN!yRL@`nBpE0Dq z7Q=fDCD7{Hr8g`;p-x+V!O-1brf_MtHSZvrDNw0SA^twSeru>mgRM2C)cCEo@)12d z)@l&52`q@OAQRb%b4quZt;R6vN=84$Eb^qn=@+i~98389TwB?C1N7dki z1JhEv_7=)CBR`}^WcmlBJY~}h)AuPLPYTvcYb1xP+g1m2Jk+ZeD4323L|07+tj!X> z7t;Q4emscBA`y69w(NY;1x_ygovd7Gph@IkpeK+OwH8%wN(%lpT4<31CaN$=%uU3l zp!{a=<|4r-UXUdJr3>6LbZY(aC_8-5ulhhw+vnO=pj;C&fI^wwK(Aq<<9uh*d~m7l z5~u_#lxvE0FRp#aGI{7-(NDsl2Og0qyYqB!o%yqn(&U%Buqv%jj16zNK`=$27?h5 zQH-dNf)QInO&1u(=e}WFa=Yhux3F9~HgTo~QL(*B?5fv83LmHW`_!uYWi@*FfmCf4 z!KZ6prg3)e4jbe*7|}GZzTNssYU|gBJ<0gW1X0!$Z44o?Kzil)EGo>=i0^L}DwVJ7 zWQ9FH0yC+ix5~>#@O~?6h(t9&5OEG>L~?AMf0Q<@vN+b#_aaNlFEm(Q!l5$m)zldD zk&}?-C)L~e!~0~)vQ?e9UkiitSog2N z=?C_`B1CXqIDm6&ZAK&NOVNZ=c*rC#K8N7{M0^`QJEzIq=q+%m-{vYBG_rfnvO4D; zi{Xu?tuESm1poXpdw3f~y&|239cWpK6+v8Vrec!dcf1GUIe&0tLrOjWR zo}blSs%cNplO_^bcGKz|!X{m)~dzcGVE{)Ke=-xcw){}b2f-$4BTMD~BYto~*F zRrtq{8&w?X@B+Wa1NUVBkKwB7Q)D#~-b6V3u9dm7(ifOW-bkimfqfzj_&op%%&h%g$-Sb$8U)U=mlw z0T-apPNVx{f`7PVIl|_}GELhEHD(LeK9&JOL@<)Ni1oU@+GW*TI|Q6s@i4J5$&1(R zpkjoE*P}u*(>Q&lPm~6bmzm$_Hi>eHYbbCMlB6okw)aI@YQE?Bk{EJUC5d<@TjZhB4PXKhFr${VP20SOF$buNqLlt}G zlquUYOM0pXD3_2^P(__Q3_`xovn=j!2u&eCArFH!GZ!)H z=_c9XlQv_sP@qAt;7%zLmlPGA`*SfI`T0N(eiUoH$UvkB!>_LHluNVn=kmc@`K_-& zz=EArp%~xI0a+3Yd0p?KFG;BY(g#HqB<)a)g!W_^YasFN7&~gJ-TTIMoIewncwzl6 z=C`$e;3Kos3e?)T>-+U#ss!LABS`r%(cL}-uHY(!&vZPzb^SF~ zVHnCSdRaG-f`I#D(l^ODdPF{b2{e=}&;+Cr_l;iyQ`e(~v9)2Un5^}O43w`kV)P6} z%9e1aEo$83Dpp+fOW*Y*1|T>Jv&Gl8l$-FkDwfAh@@Ku~2Tv1AEj@g)jVZH3KKOdQ zKCpu{z5&-0Y8#!;^4ggDu3=`xH`VQDW6q~r@|^a&8vwhwE>j-xZqY-;Y_7Ly0tO8j zrq}9d$j&8{;T+3!#MOu22Tyk0;wGzXmgd_y7g08Ebz*#9-XAv4b)0C)!8b})7VPUZ zHjf7Znwy$;gFE0pd@w!oFg44i1-=2XUWC2p8|f%@dt$647(+aXdS85j@Fvhnn<0do z=62=L;yDrNqv43s3=lG;bLP6R5a*{r=&12wD+B!#iv7K!gWfQ<@ZoN=0a4q=x8y1dLDK08$Cnj2(6R1y|tEMWv8P&Z@G6Pgd*|& zH_1tuW%?E?28@RskhP(nlpYFiJJ7$=eLfc(nN0zRX%7+U>m!%qSKZl{QPD* zKetdnNa=ZuNinfog$9jBHyG3hbeQ2Lx;_^U(r<(Oxl*DhJfLbjFjy+Lxo$xH-+?1& zl0NG>EkX8KSZ3`&%6I%Ztd+Vxtc#q{M)wUknSVGyai_#@IbC!{B5?~Mh(=Auhf7%s zT;;<%8VGPAr<-4Xo>Fq=Rz$~Uo`}fOeM*KwZ7jW)^VuWYtkwT6ZdF#pCaoD?7#0}1 
zIXcC6aWzYtXk)ud4pXha92dccbS@^*yTJZjC_#()rv*`KL%a{m3yQ0HA$u{u`(Zpi{W1v7$OB8(7w@J7EjV}iy*InFhe>M3ZJqCNhkge?^mByJ&! z9I#IX=aK}(xjSoCo24yXbbj#c>S*^H@L~8XYD>TP{mJ^1gi>UGvfCsg?Ca?1fY|N! z4sNq+sM`mw3xUOpRbjYLh5T&o<7+Nw`Sp~GY08&byzxxD!til_T=X5@A>?F`~e}EqHf$l77pay2!pyeISSTWg{C3V z=cm*D+46bB`pM0`)9d-|7%(r=6<6S60m{JGHO6ONq!kI1cQ8yZ>8kJg6D^r+xeBn1l6V)kLBGwO` zN8qi@NCoa~lUs5#`D(&g96-C}_95h;kzgXFwcsj|1uPhx?BCw*=b!Xz1sG{}{0S(? z2W_xmpX|X&q+8>{p8Hwd-jb1_ol~ zR9>6>u3z-Dd-4ozk$@TYi}5@(q#RZlzCv= zcX%X}ZVVm0))^aiyIEY7Gw0X?+ zm%o!Q?ZRsXQ~3Hbpz~$l+@XD?qoY9~k+r}Q^S9E0xw+F2`Q)uvl|m5XH}xXJ@=UZ| zC60115;B72HkROw!lN>Q2@Ki;l1J72prhx1Ev^d zX>{V-rl}sKBZ8AWJEn0dNG4TQ- zE!^pkGuo(|pc2j28IZpWx|HhATuGue234*2?!s+$rpMbGmG~R|H8uxJZ%92Q--b+LnvfuG;(S^RHerFe=MRH+)B?v6&BFM=`$QctW?yt(SSw@VQ1R(bE79xG; z$9$$kdt6}2^h3mMC?S@c(+9J~*?g~2r@H2`Jm%I{)kRRjZ1HAmtt!h~)0edCGb%E^ zzr4TrOc|^rwP}~Zs4wa53V{ju6j*w#DXbGP0?0EuqZwWdKx~%*IlNR6&I$io*TsN@ z-WS^GIjp%h198%DN1(yy&-}fbl|D8f4+B9n1jju~px}r;Fv{h8n8MGeQa%w8G5)2c zospIO7%P(|E1Cpqaez2VX3}_|g2&H3afN{?7|&XYWuk4ID+Rjysxy-6mV18s9S({N zHV;n1iOzjH=5$yP%$cPOuB9adUX?qZfnUGGXrZ^2qW}N*mp~aqc(yd}`QzXYN(}3cC?teWVJHd{I_`shbC}`rj#HTUyn|XUft@uvaOaw+G?9=VAqSxpL*dWI%?_sP(#Qeeg1^~ zT7GIxVRrU;RUpuMN1VrdfiW0rE_;g_Lvf{oec z7b+W?2Pq*a^jL266nyp@f5-`bh<&%GXLN4!cFm~dL72t5ChYBj>j&${oW-I)BMDe_3{X!CT*k(VfCYQC{R|511vb~<`VodwQ5Alfkr6c!}GM2G-kL(f;ON)yp9 zi;t+yEaB;IhVyjEL?;&^+kbDp&8?73ff)#Y1PM{G2Y=ZTu?=o{!lNM3IY(g+3K$Rv zhW&GAoLL~c%IZ2W7LQ*NJQ8L0lMT7?bp-7vc+h!yJl-F!N8t6%1H_@(RdJrHx{mpO zJhj7SY6#MueqI}Y9NRO~&MhfGn!Zf%)ix#fI84q?E&?8v!OSWqKh${Px1&jSLG*q1 zS^8kji|3wY%~tRCGGAuDG@MLW776y@o1NOcb*xwg9hv^*T2@*ml9qa_XZoejC%9|) zMF+LzajP&KDq4KF-8OV+S2S@GT}31HP#{RLzc0iiLQyXa=jhlzEDH<`EKAx50KaP!5q z)h5Duj97oT<3JxhgApWzsqOhg^EFf;FT%PhqWF@xIRcMa60`1yZrXm+rKk3?gem0e zm2A`cqTCi}t92jQvf}qUWc7ZnFSDi>wd~#;IXH>g;GBF}h697ghE1+%M0}m%y#1a> z^BV!0OwAat5?E^9CSckGI!8sKgT^5&h?C-t3H=}V=m&5VBqF#xcq9k;%Nt;4&!=tX z2g7s&J9V3AWv(3H^uZ&y!s0zAEy|`$c)TG0b-Kpu^w`S`vENKM`r-8t=iznOiNH6+M_Z+2MVwT52GB_K6)g5c>eH+ zD@o<_2U+qh5(HTrQAf^JC12aXL_{0|UgYY8T2QnqL!f+BY_zJx`9>150N%w6HvEuU zX#{`^Nk9ytBJLJuNMRfv?Hsu9wv0kBCiYfB7`rvo;0I#6t@=ZFPzMNlN>Gi^$n}&Q zCY!(rWj=TVy_M#*X-Ngj-I&@%ueM`ttm;n#(u+LFKrW*RAq1Sl1Nm-1vP2`7E3Jr| zY$sHQ=pNm*j~4BjgK1+R8%2;)Pr4e_;qHdyxL=Th4t5AbT?936yUMrW*~#gw&x z?2U&4Zwx@uXy{LxrEoMTG5oY#QRR-L{Q6nhiIrZ)ly)sQ@?BDyubjEmH zpMZ!P_DoNQV@?X3K5OVxH`h~9t)$XNfXD2HToD&4*yJDg?0v_QyXE6za3m^(R|)JA z6&GXuMuv&ws+Fjh=BT%v=#TKJ=Z~vn6Wpz7Vi{>Y(Nl&)hHAWLUf-cvW&E3ax_mw{ zS1K0!X1qN_*@?Op80OpikXghSSC?ndH8n1wIgBf2%QsIu@z`~_O}2(-73vf06zUCM zg+O`c@UFV2Rz&)o85MBY%Bo_hI1m!(LEqd960o}ZJeXu<+(*9@zaql5sEj^pg%B`2S>!45^_H;>h0x|vlxt3Vor%sVr9_u`F0gyvgkQ_GkyHw?4Tv*AXUsyTTAv) zqOPFvnMs@Jh`01(>1j|v*a45wq~C!j^Ym_LPb9Hd&xP>0c^F=RBaY65BYc)=w6)NA z6jlz0T{Lh|D8W+4Y)H!$abN_nGWa_46R-6S!DeG(j%7h(Di!W<4X!SV{loxUokbYp zqZ4(Kz4koll(r@Pa^pZVpnd1)al#;=VXj8^0-z_Qae zC`NeB2vqaN6Bd#bCiW4@nIIdQ=fYaDlf4Qlqs0k}mWh=ihfTcB>^ZZt>ua6{;u>c5 ztk0+%v8CcdsBZ-T=P1Z4VrUWMAF;~j=Gmb;WpaIu7-~uFzlhVqhD%zXKuTAVic}D; ztK{4r{UZrpUNw66Dm*JBP^rIVLm0!t5-M$HoaNculZ~pj@kWL|V`dAqzml_{CIIrb zcd0zORsEJIY3iHP&|;Jg%7CYArg!`WDu`% zzgQ{?!lTz&)8-%y>E?|=>IqzyY)rH)%le^B{fqP6(d6yqy@#jY^Rv5ivxnG+Yug)6 z*`RZ)j@R$agSUD`wI_tnx8+%%7-DLOe96e2_9$Qxx0wKW5wg_#kcWFmK4Y?XKaT7! 
zb*Zkn)4mN=U7yECYtkwmd||3eMH>Bzv6a^j(+3HDke7U;({6=3jaJHJSH@?-2b1(Nrg+oH#L#{FalW{C{%q{^sgm{lo6 z(KK};Smm;+8^mATE3CAif)@3A@t)E|-5%5l5hbyHcMO0KoD~$SEMuar3lhd=1Em7B zH&jK7){9{<74^#mN6NjMBM*-(<4a|?m5_Vr0;#BSodhH%$4728T&l~Ho+|YI%Fa-H zK&ECtWRoXowTIjlk|f4VHrOYLUl)A1IU}hq!=;c*(0~YIk&z1*sC^Zao5k#9BmPDT z%Gp|YmP~MEBo?IZA5{hl8{Mh`#+9+LrPbBl(rUf({V)@R7s~bh<;?m0c45Qrdn$YaCMM9%7V_B@zQ2z=9ItL+M3GsFF31BM?il(80Yx`t@1_EDpDB`(%LR%- zr@zI4Z&nQ$gIqrsvZOEEzCkk8XpCg+o36{LOafi$&HE1=Qi;@sJ%S8V=!AGmnCN9X zv&x*4-9?VX@#ep!q&YKqjfkw!pgF3JLfyI&aI+YW~UYqY;NY?=O~<^cs88YbQOUsj8B0-82p! zfo)>f<1u>|iK%$VRtsk~?Bb#~4e*4qw5|3*N0l&qAJfiQz_prMQ!gtkWe7zbZ z916*KbjllgMZfzGky7+KDpy4>a-S3o9rO8LX)>N_jZuBlxGzNl?FUj@Lh=v6ruUM{ zgS&+lPU0Jj%_pWV7mB`x4Y{4&PnM(D+Iyx zicD7VVXZm|;5+N^#oFQa8y?S}1v_MzFEbzGEl$skjCrdIil$2OumQRps)Jlv2M%MH z?pN=WgyR;cDstQ8?Zppb{N(Mdqk1k1$C}(t5PTYFNx_R~t91=LZ-qxhZY~V0L2wds z!EB}uSXhb;Tc#ku`D(v(mxgDIg)05tn8et893$d&pMl3FbiVp%s|-UH*Mw^MNFxZk z0q_Qj0IC7LHvg^opv<&g!3X90E|&3?0_A&qEU^D^4CJLW5TzkgzbK1&cz6o!^-&V+ z<|;|=UL4ra>u)vgt^-Je0(Ry!%eu*NhoIkzO&XIazo+4e^t8_U`;NuBfj3Y~_l5AU z-eYbZ<@&L>YTk=!)Ve_?6YiBr&pmG%pDQ5Kc+QdSL#!O5w#<#&Q(Tqp(PekC9U^1M zffw9*KfZgi@nA^bXwrVqpAk~Ff-8Wzi77G|4LUKpX9=;?x?Ldk>#Jc+QYi+Jt~4)- zs4XQiYa+t5ycq()0}(|rZLD~)YJ`J7mWGEUZAyS4P{1`NV|vF4w?d7o_xEVSW$!3< z_-bn*(e-?9!5x=4!-3TuSlrm|lvedgUn%?8Tz7nGrfvxzXBe%U63yo@7U05VJT*^{ zs92{-tEN9ci&oN1fucg62DM2kxAb;d!S8TV+2>fw>J%4sNo%2)Tupa;ETXQdqOy{7Rc-89iUl zA01Db{H@IMO5Hx~j@(Uq{jRrM(vr&RQ z!ht~X(33u4)vy*}F1wg*Q`0axLy&%;+S~QfkiD-PaVVPx@(YwjYDMJV^k;gyzvK}A zPlg;l-QRMR|Hm8j-{la0*28~q#SH(@OZ-KV`@bRydZzzeB{Tnd!+&Ttc>X+S6>J>; zkUk0M{zmMS(>DR|@W2DCjsE%xe=GaH?9~i(|Eix%QXjWpXZep_eHxvh%%Rxtih9B} zMO->Rnpb&VQZ9v&2)2H4f*xMCe;vx(f=NK`M?gTH5JnZ&0{Pp{7JsJw_T(LON{poo z*Q|hK2|L*5QZr9D88wn^DD}>=1FMs;JMs$@IUYsdpJ1$Ei9jr4z)60pVrJ1*h?r}l z(L=&6xMge#sF@*u9T`CN5=j}BI-?P`Tkj%D9V*e8ontmTB4om zf`)zfZ!c9qE?>FuMGj`_hkTDec7j`$^|ovNBS{t0sw(VLDL- z33#4!K8!Bq-(CKy*;nI_9KghA23ROj8^Dk6_fN0;)#5v+9ZchEUwLXn&AT|}5Q>3d zTN|l|RDY<+-xy4%z>ex@@u0s?#liDTGs^t>h>k5K2*Dg zUrB=`twxyJ10yfNhR0SCP}aQ20C!5h+joOI*ih-To$in|K^-V(gj&5NZqWDsuD_qL zqQn4(pdb*Gw-fNaKI;$nj$VVPLdzZ=D2uyavoD7zSPp@?$JiX%Kg!E{6PuTdWs>;0 zh{-@m@D*nFmAz4%ori?w6QTB~FIYld=h5yvF{8(ama68xCsY6`a26yCJdP?2C`Zv8 zlqd`7Jupxp0H>1Br&i^d9X}l1avYmr-^VSM%_&;c7jm{ zHUuWH3)1o{Pf|JJ1`0YG270;i!DILrH9=mz-#=W`}y+VHV2CS7S% zUv@!?5YtcJOJvJi(_)2}UgmtkN6xtFRM^K{a=v4yajBv^&QZB14B ztjda=w5!}Z$KI`#lODY&7O;l?C{sOw4x2}WtMmWF(N`qWr|w`8ceK1Dty&&CZG$Hi zueoONj(p?n7CT8zcW$+sh#?nFt;@$+BnDF}GI)nVD3{K=B%O&OD*}Wr;ojmoy#wif zefIKt$$o7j8^KK8-I7&BRmxyOD@4l)tXX#PqhU6h7;P%_QBqMq z>(}F!BT|sMYm}u9p zb#Z^<01dBgR#J3vfd&<`#q3`Xn2qhzu9gdqGrAHPu>Razm#09Y5G?(WODgay;{5gi zXrE=kv$HOSf$mfxcwFUZu9&!xEb9Ok7prG+3L&bLkV`wX2CQ#+ODz&h@Znb7;&~K{ z3J_uv@HehN;M&r6EEr@>4MgjU{jB_d>oq35aT%E~8$ zhwoq8Ju5UIy^m;e2B>$5!Kjr)bxFLmlEOcj z1dO-cH!EvzVa#*1x3GORWG={BIC#TXyj!%Vrk;6jc+;c=ardR9thR4&_Yb+;e8tQz z%S$@dpmR|F(8~1182L2Kt3S=lEvr$zWLa&FuyYnk!w=SZf7m-#N$?R-_36U@K%Bdr zkovbE=ikWy=;{8KbO3+HF#oCp_&fCZ??*fSt-${h&d{^5{7WoT;UD(@1`E>U{U@Iv z545+vU+OsLAOUl1gd&#d8k0^AcsS39#hOZOv)CHjOY~6yXMvICH*v~Q{#c{V8Frkh zKhH5H!tlGh>Wk$)0B9^?H;@6RuXgEWmZx;KewOP2`?`F5xk+x=P zLd2e!pTJoN6+3|n6oq_fCYa5q*2fIfjyT{vvI2;9gB-Tl6OVw-wF6qMD|J&o$Gz5A zJXvPJ=j{GFDM{29V#ffIM?GN@5d2My!K*<}?_+TeTNNZ%CHWv*7ngxU9>`6$AMiU3 z1Ent5t1Skf%Wu}H1#o%_3>_QQnB$twGSNi`=^>u$Y9ebxLJAZ@J8Q=@>%?Yq@8X2qt^(ZJHsLM=u)1M?5>NG|YD1wqBP(`1#Sui#o-VfwjQ~JmgLHDG;(d)>F;GKb zN3Hh4@l_i$pM{D1P<9rOhEKHU@Z;l4ccQ{gE<}yrjb&A#5yDkXf#D;~3IC|;eF?T+ zTpxoGD9}_>jpRCaJIvYipq_KKda0M4)(=kB?Nc#zOe7RR_(~@Za3k#HPZmQ9tg#>r zX)8fO*h9@^zH0;^iw6OAhu@}?1sdV~M$Pu^opp%MH^;aswoJCawu6i*DlD0XsQ{gx 
zpKmpQY58!;NNS$8%`*#HgfyIDl(8tgU*j(8(mFAnCx*y$KiQcVnuu(yxh)G~z}q-> zBVT)?)HVVhIP4Vqg+imucb90U6Vhvf>IbQmM3JxXZ@w`LJK4iF46*AuBBnwkG^GknABZ?sBSsfA6UAtqE{WD z%X3f*egL#vv}&Q*_QQ4=rHy%hC-?%4S9Pm`4U{wVCSOA7N&Qe$1eJSmHi=?4xXFNx zi;$@#yw5Fxj`}BOTg~;(Po&`0Cs zWC)(MuHVo}impu)!{jbb4_Pms`KE}b-q!kJvhdma@T^_q{9{NnZB9gRcK^CG(1<}O zj{|nrEe;mU4i{Sssp&vY z5)k+**!ZAi^WcvaEZ%z$XJr?RVN9CQ?7}o+jd_N&=>!ilwyD?*PhIDu%Vu=ia#|9;x(`6UIM?EJP1PTsw22vSgw3I ztlgw7z!r7*wsp$qvp5maJ*pE!?Qnv-Y^MN|Cn&KMr7lK!)NVrYv})|1{5}$l`t1nl znFg>o8hXKmSw@bL*TVgYmd^)^M_Z0c@VPe7w#X-w8iBwnGk|-ZOc8Ji@NbV?V!*pU+o%41h-u zB&bn*rNhz~kG;y^{#-uPWZIRcXl#B9HFxTlV7xsKHRV-_f2Zg4{4#muB@eAx_**IQ zcQidc-Tx3y|L;=b|0;+4qh9jgEr zhI_D!0+?PLy-3IxH=v=jZt*a7Nh$N`9a?B*E`Bj?9t8o_Z_BoQy>G{MA9-qan@$`j zRU-;Op$~=>!27ffD^83|0bz(DSv)>FdPlqJ6|YTmn^XK1C4=as9mFh$_oARY^zNW}XiTJHIcR~_v1^)B1E*DH122}`B4n0~#fQ8VNTz#nUgZHmDY($Fm+f4D2Gg}H z;Me8dW9sen0#Z0Z7v&uGZWez>*zwwcEh1r=Lq{su>s+VPJy!nC784+5PhP??$SqU{NPZI;zY)~RLa9{( z6a}=EB7rMcr5T2>;MXUmC!mCmn1>Li?ibR_LMm01vQf#c-lSEDy4i{Pl?0(a7!>!G}c_!{X2{S~7r3SIF^fo~UoNvhelr!D#; z);yFas&5KV(c1i2bfc!9O;iO~u`V{B9i;*=Y4AO+$k=Wfr(prStk1sBP)(Q5NU%E^ zIgTkhN-snL2fl|D$CW!+c8{+y*(8MX^EHjM)*)NQ%xnkk$>C+j4OHt`p$yIY(*!>x z+`K{kr)g0+wx7QCR!;Mfn{o(XUB_6A&qHZ<7p*thUEPHBrJH;|*D9gERxe0d%RHVK z)$OPwc5fJ6rz3y}j&98`f)I_06@Rs4`do?P5Go?DjKjGHftg_`NsxostM z4fBV!ODSqZyhgA~_knQRIPf4r%zlxB4`0yN&?I~U74{ieAAE&udpXBQ2SJ9PvTm$WT~o0S zh~VR?P$swma3phw;z*kv>JqD`loc(llI&{fTigg4y=nq5gw1D`n=;4d+OQ7BhszB% z_Kbk1(}*!J67R#Fq97S0-?15j%y0TrBi`(hUh=BJ#&ozJc}-)M>o$)-fAW#$ zwzm=}2{XOkuMIfc48oeri2#dqTUbjpaT?AOVY5%B!|dyKMZ&f3=C?%PAq5$y*e{b&Uoy{` ztxOQt8|Q5ryR;u!cg4|2BC|Kj8^Q@btu1r{cd6xZiC}LU*xIo{0h67mRFnAn9#glR1uxm(VtIR^otLp4r5M^uf;9RE)gJE%D_J?q966S%d|MH349+-&d8 zC5Wkp@b1;#RF_AciwBIWE(7GxPxxCKG6|x_E~Y{$pyip8@w#=tWPaS`*@J7*xC$1a zo$B$~X3gF)X$DmNw+plq2WbK!_>G?%?)vL3{#h|J5JZEEIK&QLdC+ssx}AH{OqM+t zb13YZG>@IAW29S&ON5f**o?It%(WFPq`(o>73d^P@X=1F>X#paV%W7Dc4qO1;ydi< zE9M>0RNG0j6gpAY5%j4|9m*8ecbc0c?rlBruCJHV>_fkxCNmcomS)%nKt;OUI2E_& z*`O<7EHXh?CigUnAVk;5Xj0NGatUc!&y?Ly+zjtvD>xbjq`4m2r`GGt8uP8qgCNs| zyC>T5Fxj(*+?YY%JT!ibT=4GfbsvXfH3387IDG*;ajlRhao|7-6+VIj{A$6s2=@N^ zddt(maX&5^W6`CDUx>o0vX;T}VM94xbu}>TE|7&h*adw>lqT(GhnD7QA3w0c z_pkf3Ce7yyKKb+}1;7mGc$GTOyFvu;WKKsxMO@XcIJF$GztWTrXwOsurXj`OYWGnT zv_Iq+gV{#_3VkTiVj0d4BwJ%1Z`u=l9leZ@1+)y|=v=0};NVIC-66<=r9~+hJLy}c zL6piyUwr4tP#&ExNsPQOey~oI{p=O;C&5ToN<6*XzEd{Ik(ioFK(Eo+>tua?TAn?g zrTN%F%20yV_YDN5a@5~mmS)g=-3(LDY~NN~fh8TKIQNO)4JG5fCBYRQ}%s7XND6{Ou8 znQgN1V0KAH%fid{)_J`Kb?}YSzbT33CFR9GINNz!O-+m=xudoAm>dm5bUR{WqG9<6qa9tiG-IZ=K8zo8K?Fnye zcd`3%5EdZ~r6l+fx=~`)?Nf?^MJ*!y2jQnEi^Dbs?yQ@2I^8H#Sa$)!fI;lBZ~)!6 z9mrL8JxYex)`TQNEZ`Z@L7pB1=s*Zp&h)2-0VjxR4f)pGv@3T&PX5rZUG~69VqI~V zvNi2p?dr|UMR^y(L$4#y&Ll&~W(rxa6sGLAA@GA1K}uxoJ71^IxOJ(78%A>x)~+I4W=woTOB1nh>s6b0e;ia>b&m51#-Ng`qVeC=O_dI@CHIVS8t0xjhJo zdruGR%>nH}P1j3r8g5q82$>BWh#!701UHIZ9e_5+oH!d!=NT z-JA}DbBsF&=d27E5klQgLgjSq9&BskVRJh@(VbL{@R#4|2Q@ zccNAad)JRdje3b{i5$}`qIe2v+B=oTW*gpg{oBCJ4=Mn2VM}l%cv#$j2M4|l1uK&H zQNsqb!A@bM=pnws@Hx(x=Jqh|8Wc;63QKR^t4&6gRvU3)3v@0mg#Ux^1URpdOjg^& zp+&pc^+jpXuy7g9k3%MmvmiY}jX9-;fct_NM#$gh&pA=vEKK6mB*ra$wN%`ORp@TA#=5g2{j3}0UC}{=8e%(5g zCrtqEa?#h?L@LLa}N=YhXty}i%22?iQ%$+P&;Z;pdZ^YrZwOsKap zBOrSLMiA08ynVDZfD`}=H`c){6?wm{3xwF;@lj&3L0C$vMgVhaYmjm|$tR+W3j*tj zR&={&zp?RBy;jgF?mvwkte9Gw%kKPK+~`~cUkT`z*m_)fxlS5KLr?9X0uDwjDk)+FgD@`*j4Ar1pVp8M%74{OCEc| ztKi@Bku4&li;|utU@OOM+~d&3Ya;^D!#Z|NM!=Lo=>~&s;oMLHu+q-gf4V3ckL^+N z*gdBG866jF#k4~{Xf6DDSyGXaQE`SA#+JUzAxz+dlK?yi2IX(v+js=<1giDwi0M;5 z2kY~?`x_umBaCBkyptbaLB(sWG3H%{8h~wgatmHu51uef>D;`G^y~1cI?it 
zrxd-`cBDT2>7Br#8M@un(UXkjGTpRgRk)CJ>dYgQgLyE~C+BUwGCulBR}hj?pBv1* zGbGX5MS5za6XCBtel0aV5*>`Q+8!8R0GiZ;^oqrn&PTh`b5idcRxK*uw#n~DoWT&D ze%*}D3e?_XTKO)xjq3qQTGR8(Zpj568-=(vQgVU?2>ozL_|tFS5|qNnHoaa(a~Rqx z1}j5SP}jm8@#pr}AwcqeY1MF;d~L}Z_e8^OTlpGU&vKKJl~scVr!ljz*7CcnyW_*M z%6sTgr_GpPB(2<&&@%`8eWS%W%j%!H_HLXzu^s>0Y3YwggxpV6q8mU&tUX)*=k~8)!j1rWFWxYsgX+BjV`4g7> z7J!X@eyyJpP^pVV*%R5Lx|BWMIW3a_!>V||>G`y1i%#amDp`sI*cxhs(u94x)e^F$ zCe!K~hVt>hT7{bc4910Jptc~ydBH-K9c%CG!=a-b#%}tA?i1E}o*n$cT7LQfoFN@D zVMrg=g?`j6zhGd>yPs{ZBBJE?R}qog`7&+-+fi;!T_F_qe_F=wB7DmhFF(u?lVTgv4 zkXu0J2ayxkSLl~ZJs{avL~pl6V?ytZD$@CoLHJE_GuThAGkGZNVVz2=F06UP@JVgu z7TpiXJ3`O*pIFYl4iyQT1v}ZvSD?iZT(l+TH!B0TU1|r5HM&cOT)IV(ct=;pzUy^+ zd-deUvZsRVK2Mi>HlDiOl?{>Dm6^-yhu$h$6D~c|s%5-~A8$9Iv5GiVJ$pU)qCK%L zqPu3tNEJyXaF3p-ZfrhkBH3cRa-Op1G$#9wD{X&ddEebjMtekZe6nLc;RO<+r~X!O z{w>i<|36Oj{+|@U{<*6EP6O>fNXsg9N!#ypV5ZcUbv%fJ@a6 zEYYRq?|WCGs`?X9!(%Rdw^s>JBL(G&f02fr1H4UMcAD~ZvExf`g$t1$=ZFw|PY(#~ zZ2wV(jANrvv?F*v@M7`sdE0;KS%=g31x3)_l4Y+M^1ixz}1;6*z<$UBfYJnkPsPG0g$LOg+V@OqjChWc;YCh zx`+eRiFW)==IW>4sj&EC*&Ow#Py9Jh|LbQBj z-_v69d$@IRlZ?Z=XZ=)liHE-&MbQ2X&6|P?C<2}YK>?TS3G(DZNb>NWFNj%e+PS^@1CIk^gJ1fMd2~}Y#q&t3a+AVu_-P=P8 zjiq~SeKXuHL6&RY*^jw^NF8&2&M|yrAko;_=zTyBa>PB(06-dw9Ni5anPXMlh$Lh1 z8w-5lBXH6cHlD}lqgf9NTTv%?E5@uP4As>VR5akvO$o2V2R{A@O*%u{oSxk;;GN!^ zqi`Yd#Po8`4an{ay+u2w|4ribCgo}6fW}(Tl@9+B75+?`W3LYEHJl6VJv?KXGb)oq zpF0=mJxqLmo#s+@O&CTwO$yASmk-v-s3mqhqE|+PIpnWR6j;bZN&WjF2g86pf)g0| z`!+3+QjV}ywdOuCBUi6TjU1&fzuY^7(D@OJ4B{;Hw+Hhmq@pog9$#vn5Rn=qcn{W| z91TqxQo%hD;vf_);pz5Js{w7l-}Er6 zRXC7*73MX6;LvzR+02kef_kDYSOOJhYj`BpF^MXuL%J(~(5B zi>LJyl9bx31945FlttUy1sIcbDrQmD#1fc}GXLU71TgzAz7B);*J7F@X_v%F z+S?ZEJFOqhFzeXhE;q#*ar-};d%0m$>OWdDLPGj{!W=?Rs$tzX*M=p>MoZbK*2;0{ zi&>ieIq0Y^+dUxU>?x6rTR(F@(lK^kb4=#P`lOdTCS&zEj+k7>sJPzDJSq*Y$9@moP#t5SFzriFO19SJY<*!l6!nlFH}~Fw2LC zSmt57U<}Uad4?M>sYs9(Z{XTTWOVAF&C9L|$K&8gOc@A+9ojse zznN|;)4o_cc{wn>!gCxun*E)8p#NLm_&a{{|L=hydY1ov-uR!({x9_f105s#Un3`q zz8h>CBZymfnT{jz2o83B)QeZ(I~Z{!?nHXa@|BQYM=|7<_36JIOpxAOjePCOGY)E3 zj2g#!G@Hs|Z<$t~*}L#EU5=gPWI( z#wYhC*Li8bnnu!Qn8rchGroYBh>9u6f*|^ULpm;cT+ftG-L{OfC*|WHNhyO02s1;J zVyCj*&RdI;+JbeZK$3kQV#<&>9NS{Csg5=rih%Yb&!*vnkcM$bXJX`!7a(vs79aOe z)fxFSJuUiyx@mPHF3LOEg*P-WHx!M5I~e4tO{%Dp6?#h4GTquJ4E5_i59ZvY_p}`F zLw^A-df!+p+Qh>Dz8M6a)hkgpQ^Ao975WpPc&so-_sU)dUG1F+3*}?4TZylm*!B>i zS;c59O_Tf7O4iPSo5P#U!ke-muDA;=(h7TCe&FU7Dv;i5x8x>P<92cvU|+@Nsa!J8 zVViozvpirQ;v)6ThN9R*1WCFXrI_CjrKPoCuc zuhDi{hqvp_t*dbhm2>B5MFK%I{MSQp4PB^dSa7O7?|_~ijnW@rGpW4~gK2WVI1Ate z!XfEZ5mq1VKbkT^Sy=^%AdAc~%&&W9^fmYgO(&e?mCrcVL#|5lumDbZyg^8AP4V6{$I4!V0{hl;JV z2&9g~A-^Cua=7J%rwQy6uz~%>PLO~Ro^L6ml5Kt)MisX?!7&RkOeI>uF&a6J528kp zgP?+qfy)u?J2}$vCrF37!z4vB(9WmN4fy_Zq83ZVn2KwD)v+C(ssLLY9K0>5e z$4%8QC#~<*@ojqdaN?|k2@CrHi{8;!JB5l<;2I7{xTY4)DfVS)kFi`7?-9vI3wmS^ z6EK1VCY)#(sJdh$M5)JXszN{|Qy)($EC*5LiS63N_LCe^5)ByGOO*q}%o$LZ=L${Y!(g0X zr|wvxQrk6((1&&^gve3*8j7n)YsiB`266!C#7`hm9d;j^L>xxUjO|fTZ3?v%D1o1U z6qQOAY3-yi<1-s{stXcaQ8BA|O!Xl0pZMj7j3$o&16+30?9u-fqwy+^lFaZ1wcw-*n4NG{Mkl~_C};J7ZzvA{jq zPWH!+h0sB+=39xe1c{HbU}~Mst*|VvNW}2hmOvOiO1Oak;L3H{l1OemMKG29RpRhC z9(ow2BJmi2vsgY**cef#U&SG%P*JuMH%>YlZp`5i^)4*ONE!*eR;0F_Qo3L?dY6TL z40d)u{T7OtwL_k?P&QEL^nF>@_hlvDmq}`^KhsG40PaB^Nivz>^=hKrPijhG;WZ<5 zrzpID+f6cD9KX`Fzoc*5)51VZ5Q68ynG&76jbY^v6=&v;g8j!gOP`XRX-cIuQ@a{b zTy}~5$i0q`2cu!_R;T+b!se)$Etn*(_~J$alSImL$0VuCwf~~PLtI{^%AuUOi8SpH zeGwvQo>LMmMKm2n5m}%hO}J;%>=v^CT`{(F)B>TqE@!wo@(CT|7(F7k$ng7KzjG>k zaE4t}JpFG+x@UW;BKF;xKA!VZg9_;JhH4=b7(p9EVNvLB@3=0gJAxD33N+2g4C%W+C zkdj4v71sAfBRt6=UYpA%F(|0f1Yqz(R5W=m>_R|gdfu^!4iyf96imPi&j#w+WE#bJ 
[A long run of base85-encoded git binary patch data is omitted here; it is not human-readable.]

[The diffs that add the eShipper vendor sample files (sample_quote_request.xml, sample_shipment_cancel_reply.xml, sample_shipment_cancel_request.xml, sample_shipping_reply.xml, sample_shipping_request.xml, plus one earlier sample file whose header was lost) and the vendor schemas error.xsd, quote_reply.xsd, quote_request.xsd, shipment_cancel_reply.xsd, and shipment_cancel_request.xsd survive only as "new file" diff headers; the added XML/XSD bodies were stripped during extraction and are not recoverable. The only readable fragments, from sample_shipping_reply.xml, are a tracking URL (http://www.fedex.com/Tracking?tracknumbers=052800410000484) and two base-64 encoded String label placeholders.]
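[Illustrative aside, not part of the patch: the surviving fragments of sample_shipping_reply.xml only show that the shipping reply carries a tracking URL and base-64 encoded label payloads. Below is a minimal sketch, assuming the labels arrive as base64 text inside some element of the reply; the tag name "Label" and the ".pdf" extension are hypothetical, the real names live in the stripped shipping_reply.xsd. It is just the standard base64-decode-and-write pattern, using only the Python standard library.]

import base64
from xml.etree import ElementTree as ET

def save_labels(reply_xml: str, prefix: str = "label") -> list:
    """Decode base64 label payloads found in a shipping reply and write them to disk."""
    root = ET.fromstring(reply_xml)
    paths = []
    # "Label" is a placeholder tag name; the real element is defined by the vendor schema.
    for index, node in enumerate(root.iter("Label")):
        data = base64.b64decode(node.text or "")
        path = f"{prefix}_{index}.pdf"  # the actual label format depends on the carrier
        with open(path, "wb") as handle:
            handle.write(data)
        paths.append(path)
    return paths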
[The diffs adding the vendor schemas shipping_reply.xsd and shipping_request.xsd likewise survive only as "new file" headers; their XSD bodies were stripped during extraction.]

diff --git a/modules/connectors/usps/README.md b/modules/connectors/usps/README.md
index bd586531c3..7565670e31 100644
--- a/modules/connectors/usps/README.md
+++ b/modules/connectors/usps/README.md
@@ -1,4 +1,3 @@
-
 # karrio.usps

 This package is a USPS extension of the [karrio](https://pypi.org/project/karrio) multi carrier shipping SDK.
@@ -21,7 +20,7 @@ from karrio.mappers.usps.settings import Settings

 # Initialize a carrier gateway
-usps = karrio.gateway["usps"].create(
+canadapost = karrio.gateway["usps"].create(
     Settings(
         ...
     )
diff --git a/modules/connectors/usps/generate b/modules/connectors/usps/generate
index 9f6b59048e..f8519c5fd1 100755
--- a/modules/connectors/usps/generate
+++ b/modules/connectors/usps/generate
@@ -1,24 +1,68 @@
 SCHEMAS=./schemas
 LIB_MODULES=./karrio/schemas/usps

-find "${LIB_MODULES}" -name "*.py" -exec rm -r {} \;
-touch "${LIB_MODULES}/__init__.py"
+mkdir -p $LIB_MODULES
+find $LIB_MODULES -name "*.py" -exec rm -r {} \;
+touch $LIB_MODULES/__init__.py

-quicktype() {
-  echo "Generating $1..."
- docker run -it --rm --name quicktype -v $PWD:/app -e SCHEMAS=/app/schemas -e LIB_MODULES=/app/karrio/schemas/usps \ - karrio/tools /quicktype/script/quicktype --no-uuids --no-date-times --no-enums --src-lang json --lang jstruct \ - --no-nice-property-names --all-properties-optional --type-as-suffix $@ -} - -quicktype --src="${SCHEMAS}/error_response.json" --out="${LIB_MODULES}/error_response.py" -quicktype --src="${SCHEMAS}/label_request.json" --out="${LIB_MODULES}/label_request.py" -quicktype --src="${SCHEMAS}/label_response.json" --out="${LIB_MODULES}/label_response.py" -quicktype --src="${SCHEMAS}/pickup_request.json" --out="${LIB_MODULES}/pickup_request.py" -quicktype --src="${SCHEMAS}/pickup_response.json" --out="${LIB_MODULES}/pickup_response.py" -quicktype --src="${SCHEMAS}/pickup_update_request.json" --out="${LIB_MODULES}/pickup_update_request.py" -quicktype --src="${SCHEMAS}/pickup_update_response.json" --out="${LIB_MODULES}/pickup_update_response.py" -quicktype --src="${SCHEMAS}/rate_request.json" --out="${LIB_MODULES}/rate_request.py" -quicktype --src="${SCHEMAS}/rate_response.json" --out="${LIB_MODULES}/rate_response.py" -quicktype --src="${SCHEMAS}/scan_form_request.json" --out="${LIB_MODULES}/scan_form_request.py" -quicktype --src="${SCHEMAS}/scan_form_response.json" --out="${LIB_MODULES}/scan_form_response.py" -quicktype --src="${SCHEMAS}/tracking_response.json" --out="${LIB_MODULES}/tracking_response.py" +generateDS --no-namespace-defs -o "$LIB_MODULES/address_validate_request.py" $SCHEMAS/AddressValidateRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/address_validate_response.py" $SCHEMAS/AddressValidateResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_availability_request.py" $SCHEMAS/CarrierPickupAvailabilityRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_availability_response.py" $SCHEMAS/CarrierPickupAvailabilityResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_cancel_request.py" $SCHEMAS/CarrierPickupCancelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_cancel_response.py" $SCHEMAS/CarrierPickupCancelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_change_request.py" $SCHEMAS/CarrierPickupChangeRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_change_response.py" $SCHEMAS/CarrierPickupChangeResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_inquiry_request.py" $SCHEMAS/CarrierPickupInquiryRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_inquiry_response.py" $SCHEMAS/CarrierPickupInquiryResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_schedule_request.py" $SCHEMAS/CarrierPickupScheduleRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_schedule_response.py" $SCHEMAS/CarrierPickupScheduleResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/city_state_lookup_request.py" $SCHEMAS/CityStateLookupRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/city_state_lookup_response.py" $SCHEMAS/CityStateLookupResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/emrsv4_0_bulk_request.py" $SCHEMAS/EMRSV4.0BulkRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/error.py" $SCHEMAS/Error.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/express_mail_commitment_request.py" $SCHEMAS/ExpressMailCommitmentRequest.xsd +generateDS --no-namespace-defs -o 
"$LIB_MODULES/express_mail_commitment_response.py" $SCHEMAS/ExpressMailCommitmentResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/first_class_mail_request.py" $SCHEMAS/FirstClassMailRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/first_class_mail_response.py" $SCHEMAS/FirstClassMailResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/hfp_facility_info_request.py" $SCHEMAS/HFPFacilityInfoRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/hfp_facility_info_response.py" $SCHEMAS/HFPFacilityInfoResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/intl_rate_v2_request.py" $SCHEMAS/IntlRateV2Request.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/intl_rate_v2_response.py" $SCHEMAS/IntlRateV2Response.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/mrsv4_0_request.py" $SCHEMAS/MRSV4.0Request.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/pts_emailresult.py" $SCHEMAS/PTSEmailResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/pts_email_request.py" $SCHEMAS/PTSEmailRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptspod_result.py" $SCHEMAS/PTSPODResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptspod_request.py" $SCHEMAS/PTSPodRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptsrre_result.py" $SCHEMAS/PTSRREResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptsrre_request.py" $SCHEMAS/PTSRreRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptstpod_result.py" $SCHEMAS/PTSTPODResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptstpod_request.py" $SCHEMAS/PTSTPodRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/priority_mail_request.py" $SCHEMAS/PriorityMailRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/priority_mail_response.py" $SCHEMAS/PriorityMailResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/rate_v4_request.py" $SCHEMAS/RateV4Request.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/rate_v4_response.py" $SCHEMAS/RateV4Response.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/scan_request.py" $SCHEMAS/SCANRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/scan_response.py" $SCHEMAS/SCANResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/sdc_get_locations_request.py" $SCHEMAS/SDCGetLocationsRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/sdc_get_locations_response.py" $SCHEMAS/SDCGetLocationsResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/standard_b_request.py" $SCHEMAS/StandardBRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/standard_b_response.py" $SCHEMAS/StandardBResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/track_field_request.py" $SCHEMAS/TrackFieldRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/track_request.py" $SCHEMAS/TrackRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/track_response.py" $SCHEMAS/TrackResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/zip_code_lookup_request.py" $SCHEMAS/ZipCodeLookupRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/zip_code_lookup_response.py" $SCHEMAS/ZipCodeLookupResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_cancel_request.py" $SCHEMAS/eVSCancelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_cancel_response.py" $SCHEMAS/eVSCancelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_request.py" $SCHEMAS/eVSRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_response.py" 
$SCHEMAS/eVSResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_express_mail_intl_request.py" $SCHEMAS/eVSExpressMailIntlRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_express_mail_intl_response.py" $SCHEMAS/eVSExpressMailIntlResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_first_class_mail_intl_request.py" $SCHEMAS/eVSFirstClassMailIntlRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_first_class_mail_intl_response.py" $SCHEMAS/eVSFirstClassMailIntlResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_gxg_get_label_request.py" $SCHEMAS/eVSGXGGetLabelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_gxg_get_label_response.py" $SCHEMAS/eVSGXGGetLabelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evsi_cancel_request.py" $SCHEMAS/eVSICancelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evsi_cancel_response.py" $SCHEMAS/eVSICancelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_priority_mail_intl_request.py" $SCHEMAS/eVSPriorityMailIntlRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_priority_mail_intl_response.py" $SCHEMAS/eVSPriorityMailIntlResponse.xsd diff --git a/modules/connectors/usps/karrio/mappers/usps/__init__.py b/modules/connectors/usps/karrio/mappers/usps/__init__.py index bfc44aa754..fa7c51c91e 100644 --- a/modules/connectors/usps/karrio/mappers/usps/__init__.py +++ b/modules/connectors/usps/karrio/mappers/usps/__init__.py @@ -4,19 +4,16 @@ from karrio.mappers.usps.proxy import Proxy from karrio.mappers.usps.settings import Settings import karrio.providers.usps.units as units -import karrio.providers.usps.utils as utils METADATA = Metadata( id="usps", - label="USPS", + label="USPS Web Tools", # Integrations Mapper=Mapper, Proxy=Proxy, Settings=Settings, # Data Units - is_hub=False, + services=units.ShipmentService, options=units.ShippingOption, - services=units.ShippingService, - connection_configs=utils.ConnectionConfig, ) diff --git a/modules/connectors/usps/karrio/mappers/usps/mapper.py b/modules/connectors/usps/karrio/mappers/usps/mapper.py index 52f1f78526..6c1aea0d34 100644 --- a/modules/connectors/usps/karrio/mappers/usps/mapper.py +++ b/modules/connectors/usps/karrio/mappers/usps/mapper.py @@ -1,94 +1,103 @@ -"""Karrio USPS client mapper.""" +from typing import List, Tuple +from karrio.core.utils.serializable import Serializable, Deserializable +from karrio.api.mapper import Mapper as BaseMapper +from karrio.core.models import ( + ShipmentCancelRequest, + # PickupUpdateRequest, + # PickupCancelRequest, + ShipmentRequest, + TrackingRequest, + # PickupRequest, + RateRequest, + ConfirmationDetails, + TrackingDetails, + ShipmentDetails, + # PickupDetails, + RateDetails, + Message, +) +from karrio.providers.usps import ( + parse_shipment_cancel_response, + # parse_pickup_update_response, + # parse_pickup_cancel_response, + parse_shipment_response, + parse_tracking_response, + # parse_pickup_response, + parse_rate_response, + shipment_cancel_request, + # pickup_update_request, + # pickup_cancel_request, + tracking_request, + shipment_request, + # pickup_request, + rate_request, +) +from karrio.mappers.usps.settings import Settings -import typing -import karrio.lib as lib -import karrio.api.mapper as mapper -import karrio.core.models as models -import karrio.providers.usps as provider -import karrio.mappers.usps.settings as provider_settings +class Mapper(BaseMapper): + settings: Settings -class Mapper(mapper.Mapper): - 
settings: provider_settings.Settings + def create_rate_request(self, payload: RateRequest) -> Serializable: + return rate_request(payload, self.settings) + + def create_tracking_request(self, payload: TrackingRequest) -> Serializable: + return tracking_request(payload, self.settings) + + def create_shipment_request(self, payload: ShipmentRequest) -> Serializable: + return shipment_request(payload, self.settings) + + # def create_pickup_request( + # self, payload: PickupRequest + # ) -> Serializable: + # return pickup_request(payload, self.settings) + # + # def create_pickup_update_request( + # self, payload: PickupUpdateRequest + # ) -> Serializable: + # return pickup_update_request(payload, self.settings) + # + # def create_cancel_pickup_request( + # self, payload: PickupCancelRequest + # ) -> Serializable: + # return pickup_cancel_request(payload, self.settings) - def create_rate_request( - self, payload: models.RateRequest - ) -> lib.Serializable: - return provider.rate_request(payload, self.settings) - - def create_tracking_request( - self, payload: models.TrackingRequest - ) -> lib.Serializable: - return provider.tracking_request(payload, self.settings) - - def create_shipment_request( - self, payload: models.ShipmentRequest - ) -> lib.Serializable: - return provider.shipment_request(payload, self.settings) - - def create_pickup_request( - self, payload: models.PickupRequest - ) -> lib.Serializable: - return provider.pickup_request(payload, self.settings) - - def create_pickup_update_request( - self, payload: models.PickupUpdateRequest - ) -> lib.Serializable: - return provider.pickup_update_request(payload, self.settings) - - def create_cancel_pickup_request( - self, payload: models.PickupCancelRequest - ) -> lib.Serializable: - return provider.pickup_cancel_request(payload, self.settings) - def create_cancel_shipment_request( - self, payload: models.ShipmentCancelRequest - ) -> lib.Serializable[str]: - return provider.shipment_cancel_request(payload, self.settings) - - def create_manifest_request( - self, payload: models.ManifestRequest - ) -> lib.Serializable: - return provider.manifest_request(payload, self.settings) - - - def parse_cancel_pickup_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: - return provider.parse_pickup_cancel_response(response, self.settings) - + self, payload: ShipmentCancelRequest + ) -> Serializable: + return shipment_cancel_request(payload, self.settings) + + # def parse_cancel_pickup_response( + # self, response: Deserializable + # ) -> Tuple[ConfirmationDetails, List[Message]]: + # return parse_pickup_cancel_response(response, self.settings) + def parse_cancel_shipment_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: - return provider.parse_shipment_cancel_response(response, self.settings) - - def parse_pickup_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: - return provider.parse_pickup_response(response, self.settings) - - def parse_pickup_update_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: - return provider.parse_pickup_update_response(response, self.settings) - + self, response: Deserializable + ) -> Tuple[ConfirmationDetails, List[Message]]: + return parse_shipment_cancel_response(response, self.settings) + + # def 
parse_pickup_response( + # self, response: Deserializable + # ) -> Tuple[PickupDetails, List[Message]]: + # return parse_pickup_response(response, self.settings) + # + # def parse_pickup_update_response( + # self, response: Deserializable + # ) -> Tuple[PickupDetails, List[Message]]: + # return parse_pickup_update_response(response, self.settings) + def parse_rate_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - return provider.parse_rate_response(response, self.settings) - + self, response: Deserializable + ) -> Tuple[List[RateDetails], List[Message]]: + return parse_rate_response(response, self.settings) + def parse_shipment_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: - return provider.parse_shipment_response(response, self.settings) - + self, response: Deserializable + ) -> Tuple[ShipmentDetails, List[Message]]: + return parse_shipment_response(response, self.settings) + def parse_tracking_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: - return provider.parse_tracking_response(response, self.settings) - - def parse_manifest_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ManifestDetails, typing.List[models.Message]]: - return provider.parse_manifest_response(response, self.settings) - + self, response: Deserializable + ) -> Tuple[List[TrackingDetails], List[Message]]: + return parse_tracking_response(response, self.settings) diff --git a/modules/connectors/usps/karrio/mappers/usps/proxy.py b/modules/connectors/usps/karrio/mappers/usps/proxy.py index 4bd7992bd5..21678fc202 100644 --- a/modules/connectors/usps/karrio/mappers/usps/proxy.py +++ b/modules/connectors/usps/karrio/mappers/usps/proxy.py @@ -1,151 +1,52 @@ -"""Karrio USPS client proxy.""" +import urllib.parse +from karrio.api.proxy import Proxy as BaseProxy +from karrio.core.utils import Serializable, Deserializable, XP, request as http +from karrio.mappers.usps.settings import Settings -import karrio.lib as lib -import karrio.api.proxy as proxy -import karrio.mappers.usps.settings as provider_settings +class Proxy(BaseProxy): + settings: Settings -class Proxy(proxy.Proxy): - settings: provider_settings.Settings + """ Proxy interface method implementations """ - def get_rates(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.run_asynchronously( - lambda _: lib.request( - url=f"{self.settings.server_url}/v3/total-rates/search", - data=lib.to_json(_), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ), - request.serialize(), + def get_tracking(self, request: Serializable) -> Deserializable: + query = urllib.parse.urlencode({"API": "TrackV2", "XML": request.serialize()}) + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - return lib.Deserializable(response, lambda _: [lib.to_dict(_) for _ in _]) + return Deserializable(response, XP.to_xml) - def create_shipment(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.run_asynchronously( - lambda _: lib.request( - url=f"{self.settings.server_url}/v3/label", - data=lib.to_json(_), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": 
"application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ), - request.serialize(), + def get_rates(self, request: Serializable) -> Deserializable: + query = urllib.parse.urlencode({"API": "RateV4", "XML": request.serialize()}) + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - return lib.Deserializable( - response, - lambda _: [lib.to_dict(_) for _ in _], - request.ctx, - ) - - def cancel_shipment(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.run_asynchronously( - lambda _: ( - _["trackingNumber"], - lib.request( - url=f"{self.settings.server_url}/v3/label/{_['trackingNumber']}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - on_ok=lambda _: '{"ok": true}', - ), - ), - request.serialize(), - ) - - return lib.Deserializable( - response, - lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], - ) - - def get_tracking(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.run_asynchronously( - lambda trackingNumber: ( - trackingNumber, - lib.request( - url=f"{self.settings.server_url}/v3/tracking/{trackingNumber}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ), - ), - request.serialize(), - ) - - return lib.Deserializable( - response, - lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], - ) - - def schedule_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/carrier-pickup", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ) - - return lib.Deserializable(response, lib.to_dict) - - def modify_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/carrier-pickup/{request.ctx['confirmationNumber']}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ) - - return lib.Deserializable(response, lib.to_dict) + return Deserializable(response, XP.to_xml) - def cancel_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/carrier-pickup/{request.serialize()['confirmationNumber']}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - on_ok=lambda _: '{"ok": true}', + def create_shipment(self, request: Serializable) -> Deserializable: + api = "eVSCertify" if self.settings.test_mode else "eVS" + serialized_request = request.serialize().replace("eVSRequest", f"{api}Request") + query = urllib.parse.urlencode({"API": api, "XML": serialized_request}) + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - return lib.Deserializable(response, lib.to_dict) + return Deserializable(response, XP.to_xml) - def 
create_manifest(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/scan-form", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, + def cancel_shipment(self, request: Serializable) -> Deserializable: + query = urllib.parse.urlencode({"API": "eVSCancel", "XML": request.serialize()}) + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - return lib.Deserializable(response, lib.to_dict) + return Deserializable(response, XP.to_xml) diff --git a/modules/connectors/usps/karrio/mappers/usps/settings.py b/modules/connectors/usps/karrio/mappers/usps/settings.py index 7e2df70d2a..d6eb88556a 100644 --- a/modules/connectors/usps/karrio/mappers/usps/settings.py +++ b/modules/connectors/usps/karrio/mappers/usps/settings.py @@ -1,20 +1,21 @@ """Karrio USPS client settings.""" import attr -import karrio.providers.usps.utils as provider_utils +from karrio.providers.usps.utils import Settings as BaseSettings @attr.s(auto_attribs=True) -class Settings(provider_utils.Settings): +class Settings(BaseSettings): """USPS connection settings.""" - # Add carrier specific API connection properties here - client_id: str - client_secret: str - account_type: str = None - account_number: str = None + # Carrier specific properties + username: str + password: str + mailer_id: str = None + customer_registration_id: str = None + logistics_manager_mailer_id: str = None - # generic properties + # Base properties id: str = None test_mode: bool = False carrier_id: str = "usps" diff --git a/modules/connectors/usps/karrio/providers/usps/__init__.py b/modules/connectors/usps/karrio/providers/usps/__init__.py index dc2163e49d..43cc125948 100644 --- a/modules/connectors/usps/karrio/providers/usps/__init__.py +++ b/modules/connectors/usps/karrio/providers/usps/__init__.py @@ -1,5 +1,3 @@ -"""Karrio USPS provider imports.""" - from karrio.providers.usps.utils import Settings from karrio.providers.usps.rate import parse_rate_response, rate_request from karrio.providers.usps.shipment import ( @@ -8,19 +6,15 @@ shipment_cancel_request, shipment_request, ) -from karrio.providers.usps.pickup import ( - parse_pickup_cancel_response, - parse_pickup_update_response, - parse_pickup_response, - pickup_update_request, - pickup_cancel_request, - pickup_request, -) +# from karrio.providers.usps.pickup import ( +# parse_pickup_cancel_response, +# parse_pickup_update_response, +# parse_pickup_response, +# pickup_update_request, +# pickup_cancel_request, +# pickup_request, +# ) from karrio.providers.usps.tracking import ( parse_tracking_response, tracking_request, ) -from karrio.providers.usps.manifest import ( - parse_manifest_response, - manifest_request, -) diff --git a/modules/connectors/usps/karrio/providers/usps/error.py b/modules/connectors/usps/karrio/providers/usps/error.py index af72796a20..28b4fc4816 100644 --- a/modules/connectors/usps/karrio/providers/usps/error.py +++ b/modules/connectors/usps/karrio/providers/usps/error.py @@ -1,26 +1,24 @@ -"""Karrio USPS error parser.""" +from typing import List +from karrio.schemas.usps.error import Error +from karrio.core.utils import Element, XP +from karrio.core.models import Message +from karrio.providers.usps.utils import Settings -import typing -import karrio.lib as lib -import karrio.core.models as models -import 
karrio.providers.usps.utils as provider_utils - -def parse_error_response( - response: typing.Union[dict, typing.List[dict]], - settings: provider_utils.Settings, - **kwargs, -) -> typing.List[models.Message]: - responses = response if isinstance(response, list) else [response] - errors: list = [response["error"] for response in responses if "error" in response] +def parse_error_response(response: Element, settings: Settings) -> List[Message]: + error_nodes = ( + [response] + if response.tag == "Error" + else response.xpath(".//*[local-name() = $name]", name="Error") + ) + errors = [XP.to_object(Error, node) for node in error_nodes] return [ - models.Message( - carrier_id=settings.carrier_id, + Message( carrier_name=settings.carrier_name, - code=error.get("code"), - message=error.get("message"), - details={**kwargs, "errors": error.get("errors", [])}, + carrier_id=settings.carrier_id, + code=str(error.Number), + message=error.Description, ) for error in errors ] diff --git a/modules/connectors/usps/karrio/providers/usps/pickup/__init__.py b/modules/connectors/usps/karrio/providers/usps/pickup/__init__.py index edc1f68fbe..7c9d5c330b 100644 --- a/modules/connectors/usps/karrio/providers/usps/pickup/__init__.py +++ b/modules/connectors/usps/karrio/providers/usps/pickup/__init__.py @@ -1,4 +1,3 @@ - from karrio.providers.usps.pickup.create import parse_pickup_response, pickup_request from karrio.providers.usps.pickup.update import parse_pickup_update_response, pickup_update_request from karrio.providers.usps.pickup.cancel import parse_pickup_cancel_response, pickup_cancel_request diff --git a/modules/connectors/usps/karrio/providers/usps/pickup/cancel.py b/modules/connectors/usps/karrio/providers/usps/pickup/cancel.py index 54f57ea858..bb6db5bac1 100644 --- a/modules/connectors/usps/karrio/providers/usps/pickup/cancel.py +++ b/modules/connectors/usps/karrio/providers/usps/pickup/cancel.py @@ -1,40 +1,50 @@ -import typing +from typing import Tuple, List +from karrio.schemas.usps.carrier_pickup_cancel_request import CarrierPickupCancelRequest +from karrio.core.utils import Serializable, SF +from karrio.core.models import PickupCancelRequest, ConfirmationDetails, Message + +from karrio.providers.usps.error import parse_error_response +from karrio.providers.usps.utils import Settings import karrio.lib as lib -import karrio.core.units as units -import karrio.core.models as models -import karrio.providers.usps.error as error -import karrio.providers.usps.utils as provider_utils -import karrio.providers.usps.units as provider_units def parse_pickup_cancel_response( _response: lib.Deserializable[dict], - settings: provider_utils.Settings, -) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + settings: Settings, +) -> Tuple[ConfirmationDetails, List[Message]]: response = _response.deserialize() - messages = error.parse_error_response(response, settings) - success = response.get("ok") == True - - confirmation = ( - models.ConfirmationDetails( + errors = parse_error_response(response, settings) + details = ( + ConfirmationDetails( carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, - operation="Cancel Pickup", - success=success, + success=True, + operation="Pickup Cancel", ) - if success + if not any(errors) else None ) - return confirmation, messages + return details, errors def pickup_cancel_request( - payload: models.PickupCancelRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - - # map data to convert karrio model to usps specific type - 
request = dict(confirmationNumber=payload.confirmation_number) + payload: PickupCancelRequest, settings: Settings +) -> Serializable: + request = CarrierPickupCancelRequest( + UserID=settings.username, + PASSWORD=settings.password, + FirmName=payload.address.company_name, + SuiteOrApt=payload.address.address_line1, + Address2=SF.concat_str( + payload.address.address_line1, payload.address.address_line2, join=True + ), + Urbanization=None, + City=payload.address.city, + State=payload.address.state_code, + ZIP5=payload.address.postal_code, + ZIP4=None, + ConfirmationNumber=payload.confirmation_number, + ) - return lib.Serializable(request, lib.to_dict) + return Serializable(request) diff --git a/modules/connectors/usps/karrio/providers/usps/pickup/create.py b/modules/connectors/usps/karrio/providers/usps/pickup/create.py index 2f87b81d7e..f5c7d8dcf2 100644 --- a/modules/connectors/usps/karrio/providers/usps/pickup/create.py +++ b/modules/connectors/usps/karrio/providers/usps/pickup/create.py @@ -1,102 +1,62 @@ -"""Karrio USPS schedule pickup implementation.""" - -import karrio.schemas.usps.pickup_request as usps -import karrio.schemas.usps.pickup_response as pickup - -import typing +from typing import Tuple, List +from karrio.schemas.usps.carrier_pickup_schedule_request import ( + CarrierPickupScheduleRequest, + PackageType, +) +from karrio.core.utils import Serializable, SF +from karrio.core.units import Packages +from karrio.core.models import ( + ShipmentRequest, + PickupRequest, + PickupDetails, + Message, +) + +from karrio.providers.usps.error import parse_error_response +from karrio.providers.usps.utils import Settings import karrio.lib as lib -import karrio.core.units as units -import karrio.core.models as models -import karrio.providers.usps.error as error -import karrio.providers.usps.utils as provider_utils -import karrio.providers.usps.units as provider_units def parse_pickup_response( _response: lib.Deserializable[dict], - settings: provider_utils.Settings, -) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + settings: Settings, +) -> Tuple[PickupDetails, List[Message]]: response = _response.deserialize() + errors = parse_error_response(response, settings) + details = None - messages = error.parse_error_response(response, settings) - pickup = ( - _extract_details(response, settings) - if "confirmationNumber" in response - else None - ) - - return pickup, messages - + return details, errors -def _extract_details( - data: dict, - settings: provider_utils.Settings, -) -> models.PickupDetails: - details = lib.to_object(pickup.PickupResponseType, data) - return models.PickupDetails( - carrier_id=settings.carrier_id, - carrier_name=settings.carrier_name, - confirmation_number=details.confirmationNumber, - pickup_date=lib.fdate(details.pickupDate), - ) +def pickup_request(payload: PickupRequest, settings: Settings) -> Serializable: + shipments: List[ShipmentRequest] = payload.options.get("shipments", []) + packages = Packages(payload.parcels) - -def pickup_request( - payload: models.PickupRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - address = lib.to_address(payload.address) - packages = lib.to_packages(payload.parcels) - options = lib.units.Options( - payload.options, - option_type=lib.units.create_enum( - "PickupOptions", - # fmt: off - { - "usps_package_type": lib.OptionEnum("usps_package_type"), - }, - # fmt: on + request = CarrierPickupScheduleRequest( + USERID=settings.username, + PASSWORD=settings.password, + 
FirstName=payload.address.person_name, + LastName=None, + FirmName=payload.address.company_name, + SuiteOrApt=payload.address.address_line1, + Address2=SF.concat_str( + payload.address.address_line1, payload.address.address_line2, join=True ), - ) - - # map data to convert karrio model to usps specific type - request = usps.PickupRequestType( - pickupDate=lib.fdate(payload.pickup_date), - pickupAddress=usps.PickupAddressType( - firstName=address.person_name, - lastName=None, - firm=address.company_name, - address=usps.AddressType( - streetAddress=address.address_line1, - secondaryAddress=address.address_line2, - city=address.city, - state=address.state, - ZIPCode=lib.to_zip5(address.postal_code), - ZIPPlus4=lib.to_zip4(address.postal_code) or "", - urbanization=None, - ), - contact=[ - usps.ContactType(email=address.email) - for _ in [address.email] - if _ is not None - ], - ), - packages=[ - usps.PackageType( - packageType=options.usps_package_type.state or "OTHER", - packageCount=len(packages), - ) + Urbanization=None, + City=payload.address.city, + State=payload.address.state_code, + ZIP5=payload.address.postal_code, + ZIP4=None, + Phone=payload.address.phone_number, + Extension=None, + Package=[ + PackageType(ServiceType=shipment.service, Count=len(shipment.parcels)) + for shipment in shipments ], - estimatedWeight=packages.weight.LB, - pickupLocation=lib.identity( - usps.PickupLocationType( - packageLocation=payload.package_location, - specialInstructions=payload.instruction, - ) - if any([payload.package_location, payload.instruction]) - else None - ), + EstimatedWeight=packages.weight.LB, + PackageLocation=payload.package_location, + SpecialInstructions=payload.instruction, + EmailAddress=payload.address.email, ) - return lib.Serializable(request, lib.to_dict) + return Serializable(request) diff --git a/modules/connectors/usps/karrio/providers/usps/pickup/update.py b/modules/connectors/usps/karrio/providers/usps/pickup/update.py index 5925a0a41c..64c9fc23f4 100644 --- a/modules/connectors/usps/karrio/providers/usps/pickup/update.py +++ b/modules/connectors/usps/karrio/providers/usps/pickup/update.py @@ -1,109 +1,65 @@ -"""Karrio USPS update pickup implementation.""" +from typing import Tuple, List +from karrio.schemas.usps.carrier_pickup_change_request import ( + CarrierPickupChangeRequest, + PackageType, +) +from karrio.core.units import Packages +from karrio.core.utils import Serializable, SF +from karrio.core.models import ( + ShipmentRequest, + PickupUpdateRequest, + PickupDetails, + Message, +) -import karrio.schemas.usps.pickup_update_request as usps -import karrio.schemas.usps.pickup_update_response as pickup - -import typing +from karrio.providers.usps.error import parse_error_response +from karrio.providers.usps.utils import Settings import karrio.lib as lib -import karrio.core.units as units -import karrio.core.models as models -import karrio.providers.usps.error as error -import karrio.providers.usps.utils as provider_utils -import karrio.providers.usps.units as provider_units def parse_pickup_update_response( _response: lib.Deserializable[dict], - settings: provider_utils.Settings, -) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + settings: Settings, +) -> Tuple[PickupDetails, List[Message]]: response = _response.deserialize() + errors = parse_error_response(response, settings) + details = None - messages = error.parse_error_response(response, settings) - pickup = ( - _extract_details(response, settings) - if "confirmationNumber" in 
response - else None - ) - - return pickup, messages - - -def _extract_details( - data: dict, - settings: provider_utils.Settings, -) -> models.PickupDetails: - details = lib.to_object(pickup.PickupUpdateResponseType, data) - - return models.PickupDetails( - carrier_id=settings.carrier_id, - carrier_name=settings.carrier_name, - confirmation_number=details.confirmationNumber, - pickup_date=lib.fdate(details.pickupDate), - ) + return details, errors def pickup_update_request( - payload: models.PickupUpdateRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - address = lib.to_address(payload.address) - packages = lib.to_packages(payload.parcels) - options = lib.units.Options( - payload.options, - option_type=lib.units.create_enum( - "PickupOptions", - # fmt: off - { - "usps_package_type": lib.OptionEnum("usps_package_type"), - }, - # fmt: on - ), - ) + payload: PickupUpdateRequest, settings: Settings +) -> Serializable: + shipments: List[ShipmentRequest] = payload.options.get("shipments", []) + packages = Packages(payload.parcels) - # map data to convert karrio model to usps specific type - request = usps.PickupUpdateRequestType( - pickupDate=lib.fdate(payload.pickup_date), - carrierPickupRequest=usps.CarrierPickupRequestType( - pickupDate=lib.fdate(payload.pickup_date), - pickupAddress=usps.PickupAddressType( - firstName=address.person_name, - lastName=None, - firm=address.company_name, - address=usps.AddressType( - streetAddress=address.address_line1, - secondaryAddress=address.address_line2, - city=address.city, - state=address.state, - ZIPCode=lib.to_zip5(address.postal_code), - ZIPPlus4=lib.to_zip4(address.postal_code) or "", - urbanization=None, - ), - contact=[ - usps.ContactType(email=address.email) - for _ in [address.email] - if _ is not None - ], - ), - packages=[ - usps.PackageType( - packageType=options.usps_package_type.state or "OTHER", - packageCount=len(packages), - ) - ], - estimatedWeight=packages.weight.LB, - pickupLocation=lib.identity( - usps.PickupLocationType( - packageLocation=payload.package_location, - specialInstructions=payload.instruction, - ) - if any([payload.package_location, payload.instruction]) - else None - ), + request = CarrierPickupChangeRequest( + USERID=settings.username, + PASSWORD=settings.password, + FirstName=payload.address.person_name, + LastName=None, + FirmName=payload.address.company_name, + SuiteOrApt=payload.address.address_line1, + Address2=SF.concat_str( + payload.address.address_line1, payload.address.address_line2, join=True ), + Urbanization=None, + City=payload.address.city, + State=payload.address.state_code, + ZIP5=payload.address.postal_code, + ZIP4=None, + Phone=payload.address.phone_number, + Extension=None, + Package=[ + PackageType(ServiceType=shipment.service, Count=len(shipment.parcels)) + for shipment in shipments + ], + EstimatedWeight=packages.weight.LB, + PackageLocation=payload.package_location, + SpecialInstructions=payload.instruction, + ConfirmationNumber=payload.confirmation_number, + EmailAddress=payload.address.email, ) - return lib.Serializable( - request, - lib.to_dict, - dict(confirmationNumber=payload.confirmation_number), - ) + return Serializable(request) diff --git a/modules/connectors/usps/karrio/providers/usps/rate.py b/modules/connectors/usps/karrio/providers/usps/rate.py index 2c91d30996..ed60a70134 100644 --- a/modules/connectors/usps/karrio/providers/usps/rate.py +++ b/modules/connectors/usps/karrio/providers/usps/rate.py @@ -1,67 +1,74 @@ -"""Karrio USPS rating API 
implementation.""" +from datetime import datetime +from karrio.schemas.usps.rate_v4_response import PostageType, SpecialServiceType +from karrio.schemas.usps.rate_v4_request import ( + RateV4Request, + PackageType, + SpecialServicesType, + ShipDateType, +) -import karrio.schemas.usps.rate_request as usps -import karrio.schemas.usps.rate_response as rating - -import time import typing import karrio.lib as lib import karrio.core.units as units import karrio.core.models as models import karrio.core.errors as errors -import karrio.providers.usps.error as error -import karrio.providers.usps.utils as provider_utils +import karrio.providers.usps.error as provider_error import karrio.providers.usps.units as provider_units +import karrio.providers.usps.utils as provider_utils def parse_rate_response( - _response: lib.Deserializable[dict], + _response: lib.Deserializable[lib.Element], settings: provider_utils.Settings, ) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - responses = _response.deserialize() - - messages = error.parse_error_response(responses, settings) - rates = lib.to_multi_piece_rates( - [ - ( - f"{_}", - [_extract_details(rate, settings) for rate in response["rateOptions"]], - ) - for _, response in enumerate(responses, start=1) - if response.get("rateOptions") is not None - ] - ) - - return rates, messages + response = _response.deserialize() + rates: typing.List[models.RateDetails] = [ + _extract_details(package, settings) + for package in lib.find_element("Postage", response) + ] + return rates, provider_error.parse_error_response(response, settings) def _extract_details( - data: dict, - settings: provider_utils.Settings, + postage_node: lib.Element, settings: provider_utils.Settings ) -> models.RateDetails: - rate = lib.to_object(rating.RateOptionType, data) - mail_class = rate.rates[0].mailClass - service = provider_units.ShippingService.map(mail_class) - charges = [ - ("Base Charge", lib.to_money(rate.totalBasePrice)), - *[(_.description, lib.to_money(_.price)) for _ in rate.rates], - *[(_.name, lib.to_money(_.price)) for _ in rate.extraServices], - ] + postage: PostageType = lib.to_object(PostageType, postage_node) + + service = provider_units.ServiceClassID.map(str(postage.CLASSID)) + charges: typing.List[SpecialServiceType] = getattr( + postage.SpecialServices, "SpecialService", [] + ) + rate = lib.to_decimal( + ( + lib.find_element("CommercialPlusRate", postage_node, first=True) + or lib.find_element("CommercialRate", postage_node, first=True) + or lib.find_element("Rate", postage_node, first=True) + ).text + ) + commitment_date_node = lib.find_element("CommitmentDate", postage_node, first=True) + estimated_date = lib.to_date(getattr(commitment_date_node, "text", None)) + transit = ( + (estimated_date.date() - datetime.now().date()).days + if estimated_date is not None + else None + ) return models.RateDetails( - carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, service=service.name_or_key, - total_charge=lib.to_money(rate.totalPrice), - currency="USD", + total_charge=rate, + currency=units.Currency.USD.name, + transit_days=transit, extra_charges=[ - models.ChargeDetails(name=name, currency="USD", amount=amount) - for name, amount in charges + models.ChargeDetails( + name=charge.ServiceName, + amount=lib.to_decimal(charge.Price), + currency=units.Currency.USD.name, + ) + for charge in charges ], - meta=dict( - service_name=service.name or mail_class, - zone=lib.failsafe(lambda: 
rate.rates[0].zone), - ), + meta=dict(service_name=(service.name or postage.MailService)), ) @@ -69,67 +76,93 @@ def rate_request( payload: models.RateRequest, settings: provider_utils.Settings, ) -> lib.Serializable: - shipper = lib.to_address(payload.shipper) - recipient = lib.to_address(payload.recipient) + """Create the appropriate USPS rate request depending on the destination + + :param payload: Karrio unified API rate request data + :param settings: USPS connection and auth settings + :return: a domestic or international USPS compatible request + :raises: an OriginNotServicedError when origin country is not serviced by the carrier + """ if ( - shipper.country_code is not None - and shipper.country_code != units.Country.US.name + payload.shipper.country_code is not None + and payload.shipper.country_code != units.Country.US.name ): - raise errors.OriginNotServicedError(shipper.country_code) + raise errors.OriginNotServicedError(payload.shipper.country_code) if ( - recipient.country_code is not None - and recipient.country_code != units.Country.US.name + payload.recipient.country_code is not None + and payload.recipient.country_code != units.Country.US.name ): - raise errors.DestinationNotServicedError(recipient.country_code) + raise errors.DestinationNotServicedError(payload.recipient.country_code) - services = lib.to_services(payload.services, provider_units.ShippingService) + package = lib.to_packages( + payload.parcels, package_option_type=provider_units.ShippingOption + ).single + container = provider_units.PackagingType[package.packaging_type or "your_packaging"] options = lib.to_shipping_options( payload.options, + package_options=package.options, initializer=provider_units.shipping_options_initializer, ) - packages = lib.to_packages( - payload.parcels, - options=options, - package_option_type=provider_units.ShippingOption, - shipping_options_initializer=provider_units.shipping_options_initializer, + service = ( + units.Services(payload.services, provider_units.ShipmentService).first + or provider_units.ShipmentService.usps_all ) - # map data to convert karrio model to usps specific type - request = [ - usps.RateRequestType( - originZIPCode=shipper.postal_code, - destinationZIPCode=recipient.postal_code, - weight=package.weight.LB, - length=package.length.IN, - width=package.width.IN, - height=package.height.IN, - # mailClass=None, - mailClasses=[ - service.value - for service in ( - services - if any(services) - else [provider_units.ShippingService.usps_all] - ) - ], - priceType=options.usps_price_type.state or "RETAIL", - mailingDate=lib.fdate( - package.options.shipment_date.state or time.strftime("%Y-%m-%d") - ), - accountType=settings.account_type or "EPS", - accountNumber=settings.account_number, - itemValue=lib.identity( - package.items.value_amount if len(package.items) > 0 else None - ), - extraServices=[ - lib.to_int(_.code) - for __, _ in options.items() - if __ not in provider_units.CUSTOM_OPTIONS - ], - ) - for package in packages - ] + request = RateV4Request( + USERID=settings.username, + PASSWORD=settings.password, + Revision="2", + Package=[ + PackageType( + ID=0, + Service=service.value, + FirstClassMailType=( + provider_units.FirstClassMailType[container.name].value + if "first_class" in service.value + else None + ), + ZipOrigination=payload.shipper.postal_code, + ZipDestination=payload.recipient.postal_code, + Pounds=0, + Ounces=package.weight.OZ, + Container=container.value, + Width=package.width.IN, + Length=package.length.IN, + Height=package.height.IN, 
+ Girth=( + package.girth.value if package.packaging_type == "tube" else None + ), + Value=options.declared_value.state, + AmountToCollect=options.cash_on_delivery.state, + SpecialServices=( + SpecialServicesType( + SpecialService=[option.code for _, option in options.items()] + ) + if any(options.items()) + else None + ), + Content=None, + GroundOnly=options.usps_option_ground_only.state, + SortBy=( + provider_units.SortLevelType[container.name].value + if service.value in ["All", "Online"] + else None + ), + Machinable=(options.usps_option_machinable_item.state or False), + ReturnLocations=options.usps_option_return_service_info.state, + ReturnServiceInfo=options.usps_option_return_service_info.state, + DropOffTime=( + "13:30" if options.shipment_date.state is not None else None + ), + ShipDate=( + ShipDateType(valueOf_=lib.fdate(options.shipment_date.state)) + if options.shipment_date.state is not None + else None + ), + ) + ], + ) - return lib.Serializable(request, lib.to_dict) + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps/karrio/providers/usps/shipment/__init__.py b/modules/connectors/usps/karrio/providers/usps/shipment/__init__.py index 70abebcdbe..a225404c70 100644 --- a/modules/connectors/usps/karrio/providers/usps/shipment/__init__.py +++ b/modules/connectors/usps/karrio/providers/usps/shipment/__init__.py @@ -1,9 +1,2 @@ - -from karrio.providers.usps.shipment.create import ( - parse_shipment_response, - shipment_request, -) -from karrio.providers.usps.shipment.cancel import ( - parse_shipment_cancel_response, - shipment_cancel_request, -) +from karrio.providers.usps.shipment.create import parse_shipment_response, shipment_request +from karrio.providers.usps.shipment.cancel import parse_shipment_cancel_response, shipment_cancel_request diff --git a/modules/connectors/usps/karrio/providers/usps/shipment/cancel.py b/modules/connectors/usps/karrio/providers/usps/shipment/cancel.py index 19ade56991..687f7edb07 100644 --- a/modules/connectors/usps/karrio/providers/usps/shipment/cancel.py +++ b/modules/connectors/usps/karrio/providers/usps/shipment/cancel.py @@ -1,53 +1,53 @@ -import typing +from typing import Tuple, List +from karrio.schemas.usps.evs_cancel_request import eVSCancelRequest +from karrio.schemas.usps.evs_cancel_response import eVSCancelResponse +from karrio.core.utils import Serializable, Element, XP +from karrio.core.models import ShipmentCancelRequest, ConfirmationDetails, Message + +from karrio.providers.usps.error import parse_error_response +from karrio.providers.usps.utils import Settings import karrio.lib as lib -import karrio.core.models as models -import karrio.providers.usps.error as error -import karrio.providers.usps.utils as provider_utils -import karrio.providers.usps.units as provider_units def parse_shipment_cancel_response( - _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], - settings: provider_utils.Settings, -) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: - responses = _response.deserialize() - messages: typing.List[models.Message] = sum( - [ - error.parse_error_response(response, settings, tracking_number=_) - for _, response in responses - ], - start=[], - ) - success = all([_["ok"] for __, _ in responses]) + _response: lib.Deserializable[Element], + settings: Settings, +) -> Tuple[ConfirmationDetails, List[Message]]: + response = _response.deserialize() + errors: List[Message] = parse_error_response(response, settings) + cancel_response = 
XP.to_object(eVSCancelResponse, response) + + if cancel_response.Status != "Cancelled": + errors.append( + Message( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + message=cancel_response.Reason, + code=cancel_response.Status, + ) + ) - confirmation = ( - models.ConfirmationDetails( + details = ( + ConfirmationDetails( carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, - operation="Cancel Shipment", - success=success, + operation="Shipment Cancel", + success=True, ) - if success + if not any(errors) else None ) - return confirmation, messages + return details, errors def shipment_cancel_request( - payload: models.ShipmentCancelRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - - # map data to convert karrio model to usps specific type - request = [ - dict(trackingNumber=_) - for _ in set( - [ - payload.shipment_identifier, - *((payload.options or {}).get("shipment_identifiers") or []), - ] - ) - ] + payload: ShipmentCancelRequest, settings: Settings +) -> Serializable: + request = eVSCancelRequest( + USERID=settings.username, + PASSWORD=settings.password, + BarcodeNumber=payload.shipment_identifier, + ) - return lib.Serializable(request, lib.to_dict) + return Serializable(request, XP.export) diff --git a/modules/connectors/usps/karrio/providers/usps/shipment/create.py b/modules/connectors/usps/karrio/providers/usps/shipment/create.py index e21874b2eb..6f1df6d628 100644 --- a/modules/connectors/usps/karrio/providers/usps/shipment/create.py +++ b/modules/connectors/usps/karrio/providers/usps/shipment/create.py @@ -1,7 +1,5 @@ -"""Karrio USPS create label implementation.""" - -import karrio.schemas.usps.label_request as usps -import karrio.schemas.usps.label_response as shipping +import karrio.schemas.usps.evs_request as usps +import karrio.schemas.usps.evs_response as shipping import time import typing @@ -9,57 +7,38 @@ import karrio.core.units as units import karrio.core.models as models import karrio.core.errors as errors -import karrio.providers.usps.error as error -import karrio.providers.usps.utils as provider_utils +import karrio.providers.usps.error as provider_error import karrio.providers.usps.units as provider_units +import karrio.providers.usps.utils as provider_utils def parse_shipment_response( - _response: lib.Deserializable[typing.List[dict]], - settings: provider_utils.Settings, -) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - responses = _response.deserialize() - - shipment = lib.to_multi_piece_shipment( - [ - ( - f"{_}", - _extract_details(response, settings, _response.ctx), - ) - for _, response in enumerate(responses, start=1) - if response.get("error") is None - ] - ) - messages: typing.List[models.Message] = sum( - [error.parse_error_response(response, settings) for response in responses], - start=[], + _response: lib.Deserializable[lib.Element], settings: provider_utils.Settings +) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + response = _response.deserialize() + errors = provider_error.parse_error_response(response, settings) + details = ( + _extract_details(response, settings) + if len(lib.find_element("BarcodeNumber", response)) > 0 + else None ) - return shipment, messages + return details, errors def _extract_details( - data: dict, - settings: provider_utils.Settings, - ctx: dict = None, + response: lib.Element, settings: provider_utils.Settings ) -> models.ShipmentDetails: - details = lib.to_object(shipping.LabelResponseType, data) - label 
= details.labelImage - invoice = details.receiptImage - label_type = ctx.get("label_type", "PDF") + shipment = lib.to_object(shipping.eVSResponse, response) return models.ShipmentDetails( - carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, - tracking_number=details.labelMetadata.trackingNumber, - shipment_identifier=details.labelMetadata.trackingNumber, - label_type=label_type, - docs=models.Documents(label=label, invoice=invoice), + carrier_id=settings.carrier_id, + tracking_number=shipment.BarcodeNumber, + shipment_identifier=shipment.BarcodeNumber, + docs=models.Documents(label=shipment.LabelImage), meta=dict( - SKU=details.labelMetadata.SKU, - postage=details.labelMetadata.postage, - routingInformation=details.labelMetadata.routingInformation, - labelBrokerID=details.labelMetadata.labelBrokerID, + carrier_tracking_link=settings.tracking_url.format(shipment.BarcodeNumber), ), ) @@ -83,181 +62,160 @@ def shipment_request( ): raise errors.DestinationNotServicedError(recipient.country_code) - return_address = lib.to_address(payload.return_address) - service = provider_units.ShippingService.map(payload.service).value_or_key + package = lib.to_packages( + payload.parcels, package_option_type=provider_units.ShippingOption + ).single + service = provider_units.ServiceType.map(payload.service).value_or_key options = lib.to_shipping_options( payload.options, + package_options=package.options, initializer=provider_units.shipping_options_initializer, ) - packages = lib.to_packages( - payload.parcels, - options=options, - package_option_type=provider_units.ShippingOption, - shipping_options_initializer=provider_units.shipping_options_initializer, + + customs = lib.to_customs_info(payload.customs or models.Customs(commodities=[])) + label_format = provider_units.LabelFormat[ + payload.label_type or "usps_6_x_4_label" + ].value + redirect_address = models.Address( + **(options.usps_option_redirect_non_delivery.state or {}) ) - pickup_location = lib.to_address(options.hold_for_pickup_address.state) - label_type = provider_units.LabelType.map(payload.label_type).value or "PDF" - # map data to convert karrio model to usps specific type - request = [ - usps.LabelRequestType( - imageInfo=usps.ImageInfoType( - imageType=label_type, - labelType="4X6LABEL", - # shipInfo=None, - receiptOption="SEPARATE_PAGE", - suppressPostage=None, - suppressMailDate=None, - returnLabel=None, - ), - toAddress=usps.AddressType( - streetAddress=recipient.address_line1, - secondaryAddress=recipient.address_line2, - city=recipient.city, - state=recipient.state, - ZIPCode=lib.to_zip5(recipient.postal_code) or "", - ZIPPlus4=lib.to_zip4(recipient.postal_code) or "", - urbanization=None, - firstName=recipient.person_name, - lastName=None, - firm=recipient.company_name, - phone=recipient.phone_number, - email=recipient.email, - ignoreBadAddress=True, - platformUserId=None, - parcelLockerDelivery=None, - holdForPickup=package.options.usps_hold_for_pickup.state, - facilityId=package.options.usps_facility_id.state, - ), - fromAddress=usps.AddressType( - streetAddress=shipper.address_line1, - secondaryAddress=shipper.address_line2, - city=shipper.city, - state=shipper.state, - ZIPCode=lib.to_zip4(shipper.postal_code) or "", - ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", - urbanization=None, - firstName=shipper.person_name, - lastName=None, - firm=shipper.company_name, - phone=shipper.phone_number, - email=shipper.email, - ignoreBadAddress=True, - platformUserId=None, - parcelLockerDelivery=None, - 
holdForPickup=None, - facilityId=None, - ), - senderAddress=usps.AddressType( - streetAddress=shipper.address_line1, - secondaryAddress=shipper.address_line2r, - city=shipper.city, - state=shipper.state, - ZIPCode=lib.to_zip4(shipper.postal_code) or "", - ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", - urbanization=None, - firstName=shipper.person_name, - lastName=None, - firm=shipper.company_name, - phone=shipper.phone_number, - email=shipper.email, - ignoreBadAddress=True, - platformUserId=None, - parcelLockerDelivery=None, - holdForPickup=None, - facilityId=None, - ), - returnAddress=lib.identity( - usps.AddressType( - streetAddress=return_address.address_line1, - secondaryAddress=return_address.address_line2r, - city=return_address.city, - state=return_address.state, - ZIPCode=lib.to_zip4(return_address.postal_code) or "", - ZIPPlus4=lib.to_zip5(return_address.postal_code) or "", - urbanization=None, - firstName=return_address.person_name, - lastName=None, - firm=return_address.company_name, - phone=return_address.phone_number, - email=return_address.email, - ignoreBadAddress=True, - platformUserId=None, - parcelLockerDelivery=None, - holdForPickup=None, - facilityId=None, - ) - if payload.return_address is not None - else None - ), - packageDescription=usps.PackageDescriptionType( - weightUOM="lb", - weight=package.weight.LB, - dimensionsUOM="in", - length=package.length.IN, - height=package.height.IN, - width=package.width.IN, - girth=package.girth.value, - mailClass=service, - rateIndicator=package.options.usps_rate_indicator.state or "SP", - processingCategory=lib.identity( - package.options.usps_processing_category.state or "NON_MACHINABLE" - ), - destinationEntryFacilityType=lib.identity( - package.options.usps_destination_facility_type.state or "NONE" - ), - destinationEntryFacilityAddress=lib.identity( - usps.DestinationEntryFacilityAddressType( - streetAddress=pickup_location.address_line1, - secondaryAddress=pickup_location.address_line2r, - city=pickup_location.city, - state=pickup_location.state, - ZIPCode=lib.to_zip4(pickup_location.postal_code) or "", - ZIPPlus4=lib.to_zip5(pickup_location.postal_code) or "", - urbanization=None, - ) - if package.options.hold_for_pickup_address.state is not None - else None - ), - packageOptions=lib.identity( - usps.PackageOptionsType( - packageValue=package.total_value, - nonDeliveryOption=None, - redirectAddress=None, - contentType=None, - generateGXEvent=None, - containers=[], - ancillaryServiceEndorsements=None, - originalPackage=None, - ) - if (package.total_value or 0.0) > 0.0 - else None - ), - customerReference=[ - usps.CustomerReferenceType( - referenceNumber=reference, - printReferenceNumber=True, + request = usps.eVSRequest( + USERID=settings.username, + PASSWORD=settings.password, + Option=None, + Revision="1", + ImageParameters=usps.ImageParametersType( + ImageParameter=label_format, + LabelSequence=usps.LabelSequenceType(PackageNumber=1, TotalPackages=1), + ), + FromName=shipper.person_name, + FromFirm=shipper.company_name or "N/A", + FromAddress1=shipper.address_line2 or "", + FromAddress2=shipper.street, + FromCity=shipper.city, + FromState=shipper.state_code, + FromZip5=lib.to_zip5(shipper.postal_code) or "", + FromZip4=lib.to_zip4(shipper.postal_code) or "", + FromPhone=provider_utils.parse_phone_number(shipper.phone_number), + POZipCode=None, + AllowNonCleansedOriginAddr=( + options.usps_option_allow_non_cleansed_origin_addr.state + if options.usps_option_allow_non_cleansed_origin_addr.state is not None + else True + 
), + ToName=recipient.person_name, + ToFirm=recipient.company_name or "N/A", + ToAddress1=recipient.address_line2 or "", + ToAddress2=recipient.street, + ToCity=recipient.city, + ToState=recipient.state_code, + ToZip5=lib.to_zip5(recipient.postal_code) or "", + ToZip4=lib.to_zip4(recipient.postal_code) or "", + ToPhone=provider_utils.parse_phone_number(recipient.phone_number), + POBox=None, + ToContactPreference=None, + ToContactMessaging=recipient.email, + ToContactEmail=recipient.email, + AllowNonCleansedDestAddr=( + lib.to_json(options.usps_option_allow_non_cleansed_dest_addr.state) + if options.usps_option_allow_non_cleansed_dest_addr.state is not None + else True + ), + WeightInOunces=package.weight.OZ, + ServiceType=service, + Container=provider_units.PackagingType[ + package.packaging_type or "variable" + ].value, + Width=package.width.IN, + Length=package.length.IN, + Height=package.height.IN, + Girth=(package.girth.value if package.packaging_type == "tube" else None), + Machinable=options.usps_option_machinable_item.state, + ProcessingCategory=None, + PriceOptions=None, + InsuredAmount=provider_units.ShippingOption.insurance_from(options), + AddressServiceRequested=None, + ExpressMailOptions=None, + ShipDate=lib.fdatetime( + (options.shipment_date.state or time.strftime("%Y-%m-%d")), + current_format="%Y-%m-%d", + output_format="%m/%d/%Y", + ), + CustomerRefNo=None, + # ExtraServices=( + # usps.ExtraServicesType( + # ExtraService=[option.code for _, option in options.items()] + # ) + # if any(options.items()) + # else None + # ), + CRID=settings.customer_registration_id, + MID=settings.mailer_id, + LogisticsManagerMID=settings.logistics_manager_mailer_id, + VendorCode=None, + VendorProductVersionNumber=None, + SenderName=shipper.contact, + SenderEMail=shipper.email, + RecipientName=recipient.contact, + RecipientEMail=recipient.email, + ReceiptOption="SEPARATE PAGE", + ImageType="PDF", + HoldForManifest=None, + NineDigitRoutingZip=None, + ShipInfo=options.usps_option_ship_info.state, + CarrierRelease=None, + DropOffTime=None, + ReturnCommitments=None, + PrintCustomerRefNo=None, + Content=None, + ShippingContents=( + usps.ShippingContentsType( + ItemDetail=[ + usps.ItemDetailType( + Description=lib.text(item.description or item.title or "N/A"), + Quantity=item.quantity, + Value=item.value_amount, + NetPounds=0, + NetOunces=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).OZ, + HSTariffNumber=item.hs_code or item.sku, + CountryOfOrigin=lib.to_country_name(item.origin_country), ) - for reference in [payload.reference] - if reference is not None - ], - extraServices=[ - lib.to_int(_.code) - for __, _ in package.options.items() - if _.name not in provider_units.CUSTOM_OPTIONS - ], - mailingDate=lib.fdate( - package.options.shipment_date.state or time.strftime("%Y-%m-%d") - ), - carrierRelease=package.options.usps_carrier_release.state, - physicalSignatureRequired=package.options.usps_physical_signature_required.state, - inductionZIPCode=lib.identity( - return_address.postal_code or shipper.postal_code - ), - ), - customsForm=None, - ) - for package in packages - ] + for item in customs.commodities + ] + ) + if payload.customs is not None + else None + ), + CustomsContentType=( + provider_units.ContentType[customs.content_type or "other"].value + if payload.customs is not None + else None + ), + ContentComments=None, + RestrictionType=None, + RestrictionComments=None, + AESITN=customs.options.aes.state, + ImportersReference=None, + ImportersContact=None, + 
ExportersReference=None, + ExportersContact=None, + InvoiceNumber=customs.invoice, + LicenseNumber=customs.options.license_number.state, + CertificateNumber=customs.options.certificate_number.state, + NonDeliveryOption=provider_units.ShippingOption.non_delivery_from(options), + AltReturnAddress1=redirect_address.address_line1, + AltReturnAddress2=redirect_address.address_line2, + AltReturnAddress3=None, + AltReturnAddress4=None, + AltReturnAddress5=None, + AltReturnAddress6=None, + AltReturnCountry=None, + LabelImportType=None, + ChargebackCode=None, + TrackingRetentionPeriod=None, + ) - return lib.Serializable(request, lib.to_dict, dict(label_type=label_type)) + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps/karrio/providers/usps/tracking.py b/modules/connectors/usps/karrio/providers/usps/tracking.py index daa392bc3f..b526ce8356 100644 --- a/modules/connectors/usps/karrio/providers/usps/tracking.py +++ b/modules/connectors/usps/karrio/providers/usps/tracking.py @@ -1,11 +1,7 @@ -"""Karrio USPS rating API implementation.""" - -# import karrio.schemas.usps.tracking_request as usps -import karrio.schemas.usps.tracking_response as tracking - +import karrio.schemas.usps.track_field_request as usps +import karrio.schemas.usps.track_response as tracking import typing import karrio.lib as lib -import karrio.core.units as units import karrio.core.models as models import karrio.providers.usps.error as error import karrio.providers.usps.utils as provider_utils @@ -13,77 +9,74 @@ def parse_tracking_response( - _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], + _response: lib.Deserializable[lib.Element], settings: provider_utils.Settings, ) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: - responses = _response.deserialize() - - messages: typing.List[models.Message] = sum( - [ - error.parse_error_response(response, settings, tracking_number=_) - for _, response in responses - ], - start=[], - ) - tracking_details = [ - _extract_details(details, settings) - for _, details in responses - if "error" not in details + response = _response.deserialize() + tracks_info = lib.find_element("TrackInfo", response) + details = [ + _extract_details(node, settings) + for node in tracks_info + if len(lib.find_element("TrackDetail", node)) > 0 ] - return tracking_details, messages + return details, error.parse_error_response(response, settings) def _extract_details( - data: dict, + node: lib.Element, settings: provider_utils.Settings, ) -> models.TrackingDetails: - details = lib.to_object(tracking.TrackingResponseType, data) + info = lib.to_object(tracking.TrackInfoType, node) + events: typing.List[tracking.TrackDetailType] = [ + *([info.TrackSummary] or []), + *info.TrackDetail, + ] + delivered = info.StatusCategory.lower() == "delivered" + expected_delivery = lib.fdate( + info.ExpectedDeliveryDate or info.PredictedDeliveryDate, + "%B %d, %Y", + ) status = next( ( status.name for status in list(provider_units.TrackingStatus) - if getattr(details, "status", None) in status.value + if str(getattr(events[0], "EventCode", None)) in status.value ), provider_units.TrackingStatus.in_transit.name, ) return models.TrackingDetails( - carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, - tracking_number=details.trackingNumber, + carrier_id=settings.carrier_id, + tracking_number=info.ID, + estimated_delivery=expected_delivery, + delivered=delivered, + status=status, events=[ models.TrackingEvent( - 
date=lib.fdate(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), - description=event.name, - code=event.eventType, - time=lib.flocaltime(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), - location=lib.text( - event.eventCity, - event.eventZIP, - event.eventState, - event.eventCountry, + code=str(event.EventCode), + date=lib.fdate(event.EventDate, "%B %d, %Y"), + time=lib.flocaltime(event.EventTime, "%H:%M %p"), + description=event.Event, + location=lib.join( + event.EventCity, + event.EventState, + event.EventCountry, + str(event.EventZIPCode or ""), + join=True, separator=", ", ), ) - for event in details.trackingEvents + for event in events ], - estimated_delivery=lib.fdate( - details.expectedDeliveryTimeStamp, - "%Y-%m-%dT%H:%M:%SZ", - ), - delivered=status == "delivered", - status=status, info=models.TrackingInfo( - # fmt: off - carrier_tracking_link=settings.tracking_url.format(details.trackingNumber), - expected_delivery=lib.fdate(details.expectedDeliveryTimeStamp, "%Y-%m-%dT%H:%M:%SZ"), - shipment_service=provider_units.ShippingService.map(details.serviceTypeCode).name_or_key, - shipment_origin_country=details.originCountry, - shipment_origin_postal_code=details.originZIP, - shipment_destination_country=details.destinationCountryCode, - shipment_destination_postal_code=details.destinationZIP, - # fmt: on + carrier_tracking_link=settings.tracking_url.format(info.ID), + shipment_destination_postal_code=info.DestinationZip, + shipment_destination_country=info.DestinationCountryCode, + shipment_origin_country=info.OriginCountryCode, + shipment_origin_postal_code=info.OriginZip, + shipment_service=info.Class, ), ) @@ -92,8 +85,20 @@ def tracking_request( payload: models.TrackingRequest, settings: provider_utils.Settings, ) -> lib.Serializable: + request = usps.TrackFieldRequest( + USERID=settings.username, + PASSWORD=settings.password, + Revision="1", + ClientIp="127.0.0.1", + SourceId="Karrio", + TrackID=[ + usps.TrackIDType( + ID=tracking_number, + DestinationZipCode=None, + MailingDate=None, + ) + for tracking_number in payload.tracking_numbers + ], + ) - # map data to convert karrio model to usps specific type - request = payload.tracking_numbers - - return lib.Serializable(request, lib.to_dict) + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps/karrio/providers/usps/units.py b/modules/connectors/usps/karrio/providers/usps/units.py index 230506199c..1678a7173e 100644 --- a/modules/connectors/usps/karrio/providers/usps/units.py +++ b/modules/connectors/usps/karrio/providers/usps/units.py @@ -1,198 +1,403 @@ +"""Karrio USPS enumerations module""" + +import typing import karrio.lib as lib -import karrio.core.units as units -class PackagingType(lib.StrEnum): - """Carrier specific packaging type""" +class ContentType(lib.Enum): + cremated_remains = "CREMATEDREMAINS" + merchandise = "MERCHANDISE" + sample = "SAMPLE" + gift = "GIFT" + documents = "DOCUMENTS" + return_merchandise = "RETURN" + humanitarian = "HUMANITARIAN" + dangerousgoods = "DANGEROUSGOODS" + nonnegotiabledocument = "NONNEGOTIABLEDOCUMENT" + pharmacuticals = "PHARMACUTICALS" + medicalsupplies = "MEDICALSUPPLIES" + other = "OTHER" - PACKAGE = "PACKAGE" - """ Unified Packaging type mapping """ - envelope = PACKAGE - pak = PACKAGE - tube = PACKAGE - pallet = PACKAGE - small_box = PACKAGE - medium_box = PACKAGE - your_packaging = PACKAGE - - -class ContentType(lib.StrEnum): - HAZMAT = "HAZMAT" - CREMATED_REMAINS = "CREMATED_REMAINS" - BEES = "BEES" - DAY_OLD_POULTRY = "DAY_OLD_POULTRY" - ADULT_BIRDS = 
"ADULT_BIRDS" - OTHER_LIVES = "OTHER_LIVES" - PERISHABLE = "PERISHABLE" - PHARMACEUTICALS = "PHARMACEUTICALS" - MEDICAL_SUPPLIES = "MEDICAL_SUPPLIES" - FRUITS = "FRUITS" - VEGETABLES = "VEGETABLES" - LIVE_PLANTS = "LIVE_PLANTS" - - -class LabelType(lib.StrEnum): - """Carrier specific label type""" - - PDF = "PDF" - TIFF = "TIFF" - JPG = "JPG" - SVG = "SVG" - ZPL203DPI = "ZPL203DPI" - ZPL300DPI = "ZPL300DPI" - LABEL_BROKER = "LABEL_BROKER" - NONE = "NONE" +class LabelFormat(lib.Enum): + usps_barcode_only = "BARCODE ONLY" + usps_crop = "CROP" + usps_4_x_6_label = "4X6LABEL" + usps_4_x_6_label_l = "4X6LABELL" + usps_6_x_4_label = "6X4LABEL" + usps_4_x_6_label_p = "4X6LABELP" + usps_4_x_6_label_p_page = "4X6LABELP PAGE" + usps_4_x_6_zpl_203_dpi = "4X6ZPL203DPI" + usps_4_x_6_zpl_300_dpi = "4X6ZPL300DPI" + usps_separate_continue_page = "SEPARATECONTINUEPAGE" """ Unified Label type mapping """ - ZPL = ZPL300DPI - PNG = JPG - - -class ShippingService(lib.StrEnum): - """Carrier specific services""" - - usps_standard_service = "USPS Standard Service" - usps_parcel_select = "PARCEL_SELECT" - usps_parcel_select_lightweight = "PARCEL_SELECT_LIGHTWEIGHT" - usps_priority_mail_express = "PRIORITY_MAIL_EXPRESS" - usps_priority_mail = "PRIORITY_MAIL" - usps_first_class_package_service = "FIRST-CLASS_PACKAGE_SERVICE" - usps_library_mail = "LIBRARY_MAIL" - usps_media_mail = "MEDIA_MAIL" - usps_bound_printed_matter = "BOUND_PRINTED_MATTER" - usps_connect_local = "USPS_CONNECT_LOCAL" - usps_connect_mail = "USPS_CONNECT_MAIL" - usps_connect_next_day = "USPS_CONNECT_NEXT_DAY" - usps_connect_regional = "USPS_CONNECT_REGIONAL" - usps_connect_same_day = "USPS_CONNECT_SAME_DAY" - usps_ground_advantage = "USPS_GROUND_ADVANTAGE" - usps_retail_ground = "USPS_RETAIL_GROUND" - usps_all = "ALL" + PDF = usps_6_x_4_label + ZPL = usps_4_x_6_zpl_203_dpi + + +class PackagingType(lib.StrEnum): + variable = "VARIABLE" + flat_rate_envelope = "FLAT RATE ENVELOPE" + padded_flat_rate_envelope = "PADDED FLAT RATE ENVELOPE" + legal_flat_rate_envelope = "LEGAL FLAT RATE ENVELOPE" + sm_flat_rate_envelope = "SM FLAT RATE ENVELOPE" + window_flat_rate_envelope = "WINDOW FLAT RATE ENVELOPE" + gift_card_flat_rate_envelope = "GIFT CARD FLAT RATE ENVELOPE" + sm_flat_rate_box = "SM FLAT RATE BOX" + md_flat_rate_box = "MD FLAT RATE BOX" + lg_flat_rate_box = "LG FLAT RATE BOX" + regional_rate_box_a = "REGIONALRATEBOXA" + regional_rate_box_b = "REGIONALRATEBOXB" + cubic_parcels = "CUBIC PARCELS" + cubic_soft_pack = "CUBIC SOFT PACK" + + """ Unified Packaging type mapping """ + envelope = flat_rate_envelope + pak = padded_flat_rate_envelope + pallet = cubic_parcels + small_box = sm_flat_rate_box + medium_box = md_flat_rate_box + tube = variable + your_packaging = variable + + +class FirstClassMailType(lib.StrEnum): + flat = "FLAT" + letter = "LETTER" + postcard = "POSTCARD" + package_service = "PACKAGE SERVICE" + package_service_retail = "PACKAGE SERVICE RETAIL" + + """ Packaging type correspondence """ + variable = package_service + flat_rate_envelope = flat + padded_flat_rate_envelope = flat + legal_flat_rate_envelope = flat + sm_flat_rate_envelope = flat + window_flat_rate_envelope = flat + gift_card_flat_rate_envelope = postcard + sm_flat_rate_box = package_service + md_flat_rate_box = package_service + lg_flat_rate_box = package_service + cubic_parcels = package_service + cubic_soft_pack = package_service + regional_rate_box_a = package_service_retail + regional_rate_box_b = package_service_retail + + +class SortLevelType(lib.StrEnum): + letter 
= "LETTER" + large_envelope = "LARGEENVELOPE" + package = "PACKAGE" + flat_rate = "FLATRATE" + + """ Packaging type correspondence """ + variable = package + sm_flat_rate_box = flat_rate + md_flat_rate_box = flat_rate + lg_flat_rate_box = flat_rate + flat_rate_envelope = flat_rate + sm_flat_rate_envelope = flat_rate + legal_flat_rate_envelope = flat_rate + gift_card_flat_rate_envelope = flat_rate + padded_flat_rate_envelope = large_envelope + window_flat_rate_envelope = large_envelope + cubic_parcels = package + cubic_soft_pack = package + regional_rate_box_a = package + regional_rate_box_b = package class ShippingOption(lib.Enum): - """Carrier specific options""" - - # fmt: off - usps_label_delivery_service = lib.OptionEnum("415", bool) - usps_tracking_plus_6_months = lib.OptionEnum("480", bool) - usps_tracking_plus_1_year = lib.OptionEnum("481", bool) - usps_tracking_plus_3_years = lib.OptionEnum("482", bool) - usps_tracking_plus_5_years = lib.OptionEnum("483", bool) - usps_tracking_plus_7_years = lib.OptionEnum("484", bool) - usps_tracking_plus_10_years = lib.OptionEnum("485", bool) - usps_tracking_plus_signature_3_years = lib.OptionEnum("486", bool) - usps_tracking_plus_signature_5_years = lib.OptionEnum("487", bool) - usps_tracking_plus_signature_7_years = lib.OptionEnum("488", bool) - usps_tracking_plus_signature_10_years = lib.OptionEnum("489", bool) - usps_hazardous_materials_air_eligible_ethanol = lib.OptionEnum("810", bool) - usps_hazardous_materials_class_1_toy_propellant_safety_fuse_package = lib.OptionEnum("811", bool) - usps_hazardous_materials_class_3_flammable_and_combustible_liquids = lib.OptionEnum("812", bool) - usps_hazardous_materials_class_7_radioactive_materials = lib.OptionEnum("813", bool) - usps_hazardous_materials_class_8_air_eligible_corrosive_materials = lib.OptionEnum("814", bool) - usps_hazardous_materials_class_8_nonspillable_wet_batteries = lib.OptionEnum("815", bool) - usps_hazardous_materials_class_9_lithium_battery_marked_ground_only = lib.OptionEnum("816", bool) - usps_hazardous_materials_class_9_lithium_battery_returns = lib.OptionEnum("817", bool) - usps_hazardous_materials_class_9_marked_lithium_batteries = lib.OptionEnum("818", bool) - usps_hazardous_materials_class_9_dry_ice = lib.OptionEnum("819", bool) - usps_hazardous_materials_class_9_unmarked_lithium_batteries = lib.OptionEnum("820", bool) - usps_hazardous_materials_class_9_magnetized_materials = lib.OptionEnum("821", bool) - usps_hazardous_materials_division_4_1_mailable_flammable_solids_and_safety_matches = lib.OptionEnum("822", bool) - usps_hazardous_materials_division_5_1_oxidizers = lib.OptionEnum("823", bool) - usps_hazardous_materials_division_5_2_organic_peroxides = lib.OptionEnum("824", bool) - usps_hazardous_materials_division_6_1_toxic_materials = lib.OptionEnum("825", bool) - usps_hazardous_materials_division_6_2_biological_materials = lib.OptionEnum("826", bool) - usps_hazardous_materials_excepted_quantity_provision = lib.OptionEnum("827", bool) - usps_hazardous_materials_ground_only_hazardous_materials = lib.OptionEnum("828", bool) - usps_hazardous_materials_air_eligible_id8000_consumer_commodity = lib.OptionEnum("829", bool) - usps_hazardous_materials_lighters = lib.OptionEnum("830", bool) - usps_hazardous_materials_limited_quantity_ground = lib.OptionEnum("831", bool) - usps_hazardous_materials_small_quantity_provision_markings_required = lib.OptionEnum("832", bool) - usps_hazardous_materials = lib.OptionEnum("857", bool) - usps_certified_mail = lib.OptionEnum("910", bool) - 
usps_certified_mail_restricted_delivery = lib.OptionEnum("911", bool) - usps_certified_mail_adult_signature_required = lib.OptionEnum("912", bool) - usps_certified_mail_adult_signature_restricted_delivery = lib.OptionEnum("913", bool) - usps_collect_on_delivery = lib.OptionEnum("915", float) - usps_collect_on_delivery_restricted_delivery = lib.OptionEnum("917", bool) - usps_tracking_electronic = lib.OptionEnum("920", bool) - usps_signature_confirmation = lib.OptionEnum("921", bool) - usps_adult_signature_required = lib.OptionEnum("922", bool) - usps_adult_signature_restricted_delivery = lib.OptionEnum("923", bool) - usps_signature_confirmation_restricted_delivery = lib.OptionEnum("924", bool) - usps_priority_mail_express_merchandise_insurance = lib.OptionEnum("925", bool) - usps_insurance_bellow_500 = lib.OptionEnum("930", float) - usps_insurance_above_500 = lib.OptionEnum("931", float) - usps_insurance_restricted_delivery = lib.OptionEnum("934", bool) - usps_registered_mail = lib.OptionEnum("940", bool) - usps_registered_mail_restricted_delivery = lib.OptionEnum("941", bool) - usps_return_receipt = lib.OptionEnum("955", bool) - usps_return_receipt_electronic = lib.OptionEnum("957", bool) - usps_signature_requested_priority_mail_express_only = lib.OptionEnum("981", bool) - usps_parcel_locker_delivery = lib.OptionEnum("984", bool) - usps_po_to_addressee_priority_mail_express_only = lib.OptionEnum("986", bool) - usps_sunday_delivery = lib.OptionEnum("981", bool) - # fmt: on - - """ Custom Options """ - usps_price_type = lib.OptionEnum("priceType") - usps_facility_id = lib.OptionEnum("facilityId") - usps_hold_for_pickup = lib.OptionEnum("holdForPickup", bool) - usps_rate_indicator = lib.OptionEnum("rateIndicator") - usps_processing_category = lib.OptionEnum("processingCategory") - usps_carrier_release = lib.OptionEnum("carrierRelease", bool) - usps_physical_signature_required = lib.OptionEnum("physicalSignatureRequired", bool) - usps_restriction_type = lib.OptionEnum("restrictionType") - - """ Unified Option type mapping """ - cash_on_delivery = usps_collect_on_delivery - signature_confirmation = usps_signature_confirmation - sunday_delivery = usps_sunday_delivery - hold_at_location = usps_hold_for_pickup - - -CUSTOM_OPTIONS = [ - ShippingOption.usps_price_type.name, - ShippingOption.usps_facility_id.name, - ShippingOption.usps_hold_for_pickup.name, - ShippingOption.usps_rate_indicator.name, - ShippingOption.usps_processing_category.name, - ShippingOption.usps_carrier_release.name, - ShippingOption.usps_physical_signature_required.name, -] + usps_insurance = lib.OptionEnum("100", float) + usps_insurance_priority_mail_express = lib.OptionEnum("101", float) + usps_return_receipt = lib.OptionEnum("102") + usps_collect_on_delivery = lib.OptionEnum("103") + usps_certificate_of_mailing_form_3665 = lib.OptionEnum("104") + usps_certified_mail = lib.OptionEnum("105") + usps_tracking = lib.OptionEnum("106") + usps_signature_confirmation = lib.OptionEnum("108") + usps_registered_mail = lib.OptionEnum("109") + usps_return_receipt_electronic = lib.OptionEnum("110") + usps_registered_mail_cod_collection_charge = lib.OptionEnum("112") + usps_return_receipt_priority_mail_express = lib.OptionEnum("118") + usps_adult_signature_required = lib.OptionEnum("119") + usps_adult_signature_restricted_delivery = lib.OptionEnum("120") + usps_insurance_priority_mail = lib.OptionEnum("125", float) + usps_tracking_electronic = lib.OptionEnum("155") + usps_signature_confirmation_electronic = lib.OptionEnum("156") + 
usps_certificate_of_mailing_form_3817 = lib.OptionEnum("160") + usps_priority_mail_express_10_30_am_delivery = lib.OptionEnum("161") + usps_certified_mail_restricted_delivery = lib.OptionEnum("170") + usps_certified_mail_adult_signature_required = lib.OptionEnum("171") + usps_certified_mail_adult_signature_restricted_delivery = lib.OptionEnum("172") + usps_signature_confirm_restrict_delivery = lib.OptionEnum("173") + usps_signature_confirmation_electronic_restricted_delivery = lib.OptionEnum("174") + usps_collect_on_delivery_restricted_delivery = lib.OptionEnum("175") + usps_registered_mail_restricted_delivery = lib.OptionEnum("176") + usps_insurance_restricted_delivery = lib.OptionEnum("177", float) + usps_insurance_restrict_delivery_priority_mail = lib.OptionEnum("179", float) + usps_insurance_restrict_delivery_priority_mail_express = lib.OptionEnum( + "178", float + ) + usps_insurance_restrict_delivery_bulk_only = lib.OptionEnum("180", float) + usps_scan_retention = lib.OptionEnum("181") + usps_scan_signature_retention = lib.OptionEnum("182") + usps_special_handling_fragile = lib.OptionEnum("190") + + """ Non official options """ + usps_option_machinable_item = lib.OptionEnum("usps_option_machinable_item", bool) + usps_option_ground_only = lib.OptionEnum("usps_option_ground_only", bool) + usps_option_return_service_info = lib.OptionEnum( + "usps_option_return_service_info", bool + ) + usps_option_ship_info = lib.OptionEnum("usps_option_ship_info", bool) + usps_option_allow_non_cleansed_dest_addr = lib.OptionEnum("usps_option_allow_non_cleansed_dest_addr", bool) + usps_option_allow_non_cleansed_origin_addr = lib.OptionEnum("usps_option_allow_non_cleansed_origin_addr", bool) + + """ Unified Shipment Option type mapping """ + insurance = usps_insurance + + @classmethod + def insurance_from(cls, options: lib.units.Options) -> typing.Optional[float]: + return next( + (option.state for key, option in options if "usps_insurance" in key), + options.insurance, + ) + + @classmethod + def non_delivery_from(cls, options: lib.units.Options) -> typing.Optional[str]: + # Gets the first provided non delivery option or default to "RETURN" + return next( + (option.state for name, option in options if "non_delivery" in name), + "RETURN", + ) def shipping_options_initializer( options: dict, - package_options: units.ShippingOptions = None, -) -> units.ShippingOptions: + package_options: lib.units.Options = None, +) -> lib.units.Options: """ Apply default values to the given options. 
""" + _options = options.copy() if package_options is not None: - options.update(package_options.content) + _options.update(package_options.content) + + def items_filter(code: str) -> bool: + return code in ShippingOption and "usps_option" not in code # type:ignore + + return lib.units.ShippingOptions( + _options, ShippingOption, items_filter=items_filter + ) + + +class ShipmentService(lib.Enum): + usps_first_class = "First Class" + usps_first_class_commercial = "First Class Commercial" + usps_first_class_hfp_commercial = "First Class HFPCommercial" + usps_priority = "Priority" + usps_priority_commercial = "Priority Commercial" + usps_priority_cpp = "Priority Cpp" + usps_priority_hfp_commercial = "Priority HFP Commercial" + usps_priority_hfp_cpp = "Priority HFP CPP" + usps_priority_mail_express = "Priority Mail Express" + usps_priority_mail_express_commercial = "Priority Mail Express Commercial" + usps_priority_mail_express_cpp = "Priority Mail Express CPP" + usps_priority_mail_express_sh = "Priority Mail Express Sh" + usps_priority_mail_express_sh_commercial = "Priority Mail Express ShCommercial" + usps_priority_mail_express_hfp = "Priority Mail Express HFP" + usps_priority_mail_express_hfp_commercial = "Priority Mail Express HFP Commercial" + usps_priority_mail_express_hfp_cpp = "Priority Mail Express HFP CPP" + usps_priority_mail_cubic = "Priority Mail Cubic" + usps_retail_ground = "Retail Ground" + usps_media = "Media" + usps_library = "Library" + usps_all = "All" + usps_online = "Online" + usps_plus = "Plus" + usps_bpm = "BPM" + usps_ground_advantage = "Ground Advantage" + usps_ground_advantage_commercial = "Ground Advantage Commercial" + usps_ground_advantage_hfp = "Ground Advantage HFP" + usps_ground_advantage_hfp_commercial = "Ground Advantage HFP Commercial" + usps_ground_advantage_cubic = "Ground Advantage Cubic" + + +class ServiceClassID(lib.Enum): + usps_first_class = "0" + usps_first_class_mail_large_envelope = usps_first_class + usps_first_class_mail_lt_letter = usps_first_class + usps_first_class_mail_lt_parcel = usps_first_class + usps_first_class_mail_postcards = usps_first_class + usps_priority_mail = "1" + usps_priority_mail_express_hold_for_pickup = "2" + usps_priority_mail_express = "3" + usps_standard_post = "4" + usps_bpm_parcels = "5" + usps_media_mail = "6" + usps_library_mail = "7" + usps_priority_mail_express_flat_rate_envelope = "13" + usps_first_class_mail_large_postcards = "15" + usps_priority_mail_flat_rate_envelope = "16" + usps_priority_mail_medium_flat_rate_box = "17" + usps_bpm_flats = "20" + usps_priority_mail_large_flat_rate_box = "22" + usps_priority_mail_express_sunday_holiday_delivery = "23" + usps_priority_mail_express_sunday_holiday_delivery_flat_rate_envelope = "25" + usps_priority_mail_express_flat_rate_envelope_hold_for_pickup = "27" + usps_priority_mail_small_flat_rate_box = "28" + usps_priority_mail_padded_flat_rate_envelope = "29" + usps_priority_mail_express_legal_flat_rate_envelope = "30" + usps_priority_mail_express_legal_flat_rate_envelope_hold_for_pickup = "31" + usps_priority_mail_express_sunday_holiday_delivery_legal_flat_rate_envelope = "32" + usps_priority_mail_hold_for_pickup = "33" + usps_priority_mail_large_flat_rate_box_hold_for_pickup = "34" + usps_priority_mail_medium_flat_rate_box_hold_for_pickup = "35" + usps_priority_mail_small_flat_rate_box_hold_for_pickup = "36" + usps_priority_mail_flat_rate_envelope_hold_for_pickup = "37" + usps_priority_mail_gift_card_flat_rate_envelope = "38" + 
usps_priority_mail_gift_card_flat_rate_envelope_hold_for_pickup = "39" + usps_priority_mail_window_flat_rate_envelope = "40" + usps_priority_mail_window_flat_rate_envelope_hold_for_pickup = "41" + usps_priority_mail_small_flat_rate_envelope = "42" + usps_priority_mail_small_flat_rate_envelope_hold_for_pickup = "43" + usps_priority_mail_legal_flat_rate_envelope = "44" + usps_priority_mail_legal_flat_rate_envelope_hold_for_pickup = "45" + usps_priority_mail_padded_flat_rate_envelope_hold_for_pickup = "46" + usps_priority_mail_regional_rate_box_a = "47" + usps_priority_mail_regional_rate_box_a_hold_for_pickup = "48" + usps_priority_mail_regional_rate_box_b = "49" + usps_priority_mail_regional_rate_box_b_hold_for_pickup = "50" + usps_first_class_package_service_hold_for_pickup = "53" + usps_priority_mail_express_flat_rate_boxes = "55" + usps_priority_mail_express_flat_rate_boxes_hold_for_pickup = "56" + usps_priority_mail_express_sunday_holiday_delivery_flat_rate_boxes = "57" + usps_priority_mail_regional_rate_box_c = "58" + usps_priority_mail_regional_rate_box_c_hold_for_pickup = "59" + usps_first_class_package_service = "61" + usps_priority_mail_express_padded_flat_rate_envelope = "62" + usps_priority_mail_express_padded_flat_rate_envelope_hold_for_pickup = "63" + usps_priority_mail_express_sunday_holiday_delivery_padded_flat_rate_envelope = "64" + usps_parcel_select_ground = "77" + usps_priority_mail_cubic = "84" + usps_parcel_select_ground_cubic = "92" + usps_bpm_flats_hold_for_pickup = "2020" + usps_parcel_select_ground_hold_for_pickup = "2071" + usps_bpm_parcels_hold_for_pickup = "2077" + usps_ground_advantage_cubic = "1096" + usps_ground_advantage_cubic_hold_for_pickup = "2096" + usps_ground_advantage_cubic_hazmat = "4096" + usps_ground_advantage_cubic_parcel_locker = "6096" + usps_ground_advantage = "1058" + usps_ground_advantage_hold_for_pickup = "2058" + usps_ground_advantage_hazmat = "4058" + usps_ground_advantage_parcel_locker = "6058" - if "insurance" in options: - if lib.to_money(options["insurance"]) > 500: - options[ShippingOption.usps_insurance_above_500.name] = options["insurance"] - else: - options[ShippingOption.usps_insurance_bellow_500.name] = options[ - "insurance" - ] - def items_filter(key: str) -> bool: - return key in ShippingOption # type: ignore +class ServiceType(lib.Enum): + usps_bpm = "BPM" + usps_media = "MEDIA" + usps_library = "LIBRARY" + usps_priority = "PRIORITY" + usps_first_class = "FIRST CLASS" + usps_priority_mail_express = "PRIORITY EXPRESS" + usps_priority_mail_cubic = "PRIORITY MAIL CUBIC" + usps_parcel_select_ground = "PARCEL SELECT GROUND" + usps_ground_advantage = "GROUND ADVANTAGE" + usps_ground_advantage_cubic = "GROUND ADVANTAGE CUBIC" - return units.ShippingOptions(options, ShippingOption, items_filter=items_filter) + """ ShipmentService type correspondence """ + usps_first_class_mail_large_envelope = usps_first_class + usps_first_class_mail_lt_letter = usps_first_class + usps_first_class_mail_lt_parcel = usps_first_class + usps_first_class_mail_postcards = usps_first_class + usps_first_class_mail_large_postcards = usps_first_class + usps_priority_mail = usps_priority + usps_priority_mail_express_hold_for_pickup = usps_priority_mail_express + usps_standard_post = usps_parcel_select_ground + usps_media_mail = usps_media + usps_library_mail = usps_library + usps_priority_mail_express_flat_rate_envelope = usps_priority_mail_express + usps_priority_mail_flat_rate_envelope = usps_priority + usps_priority_mail_medium_flat_rate_box = 
usps_priority_mail_cubic + usps_priority_mail_large_flat_rate_box = usps_priority_mail_cubic + usps_priority_mail_express_sunday_holiday_delivery = usps_priority_mail_express + usps_priority_mail_express_sunday_holiday_delivery_flat_rate_envelope = ( + usps_priority_mail_express + ) + usps_priority_mail_express_flat_rate_envelope_hold_for_pickup = ( + usps_priority_mail_express + ) + usps_priority_mail_small_flat_rate_box = usps_priority + usps_priority_mail_padded_flat_rate_envelope = usps_priority + usps_priority_mail_express_legal_flat_rate_envelope = usps_priority_mail_express + usps_priority_mail_express_legal_flat_rate_envelope_hold_for_pickup = ( + usps_priority_mail_express + ) + usps_priority_mail_express_sunday_holiday_delivery_legal_flat_rate_envelope = ( + usps_priority_mail_express + ) + usps_priority_mail_hold_for_pickup = usps_priority_mail_cubic + usps_priority_mail_large_flat_rate_box_hold_for_pickup = usps_priority_mail_cubic + usps_priority_mail_medium_flat_rate_box_hold_for_pickup = usps_priority_mail_cubic + usps_priority_mail_small_flat_rate_box_hold_for_pickup = usps_priority_mail_cubic + usps_priority_mail_flat_rate_envelope_hold_for_pickup = usps_priority + usps_priority_mail_gift_card_flat_rate_envelope = usps_priority + usps_priority_mail_gift_card_flat_rate_envelope_hold_for_pickup = usps_priority + usps_priority_mail_window_flat_rate_envelope = usps_priority + usps_priority_mail_window_flat_rate_envelope_hold_for_pickup = usps_priority + usps_priority_mail_small_flat_rate_envelope = usps_priority + usps_priority_mail_small_flat_rate_envelope_hold_for_pickup = usps_priority + usps_priority_mail_legal_flat_rate_envelope = usps_priority + usps_priority_mail_legal_flat_rate_envelope_hold_for_pickup = usps_priority + usps_priority_mail_padded_flat_rate_envelope_hold_for_pickup = usps_priority + usps_priority_mail_regional_rate_box_a = usps_priority_mail_cubic + usps_priority_mail_regional_rate_box_a_hold_for_pickup = usps_priority_mail_cubic + usps_priority_mail_regional_rate_box_b = usps_priority_mail_cubic + usps_priority_mail_regional_rate_box_b_hold_for_pickup = usps_priority_mail_cubic + usps_first_class_package_service_hold_for_pickup = usps_first_class + usps_priority_mail_express_flat_rate_boxes = usps_priority_mail_express + usps_priority_mail_express_flat_rate_boxes_hold_for_pickup = ( + usps_priority_mail_express + ) + usps_priority_mail_express_sunday_holiday_delivery_flat_rate_boxes = ( + usps_priority_mail_express + ) + usps_priority_mail_regional_rate_box_c = usps_priority_mail_cubic + usps_priority_mail_regional_rate_box_c_hold_for_pickup = usps_priority_mail_cubic + usps_first_class_package_service = usps_first_class + usps_priority_mail_express_padded_flat_rate_envelope = usps_priority_mail_express + usps_priority_mail_express_padded_flat_rate_envelope_hold_for_pickup = ( + usps_priority_mail_express + ) + usps_priority_mail_express_sunday_holiday_delivery_padded_flat_rate_envelope = ( + usps_priority_mail_express + ) class TrackingStatus(lib.Enum): - on_hold = ["on_hold"] - delivered = ["delivered"] - in_transit = ["in_transit"] - delivery_failed = ["delivery_failed"] - delivery_delayed = ["delivery_delayed"] - out_for_delivery = ["out_for_delivery"] - ready_for_pickup = ["ready_for_pickup"] + in_transit = [""] + delivered = ["1"] + ready_for_pickup = ["16"] + delivery_failed = [ + "4", + "5", + "9", + "11", + "31", + "44", + "21", + "22", + "23", + "24", + "25", + "26", + "27", + "28", + "29", + ] + out_for_delivery = ["7"] diff --git 
a/modules/connectors/usps/karrio/providers/usps/utils.py b/modules/connectors/usps/karrio/providers/usps/utils.py index c903663ae7..2d9f64c69e 100644 --- a/modules/connectors/usps/karrio/providers/usps/utils.py +++ b/modules/connectors/usps/karrio/providers/usps/utils.py @@ -1,17 +1,23 @@ -import datetime -import karrio.lib as lib -import karrio.core as core -import karrio.core.errors as errors +"""Karrio USPS client settings.""" +import typing +import karrio.core.settings as settings -class Settings(core.Settings): + +class Settings(settings.Settings): """USPS connection settings.""" - # Add carrier specific api connection properties here - client_id: str - client_secret: str - account_type: str = None - account_number: str = None + # Carrier specific properties + username: str + password: str + mailer_id: str = None + customer_registration_id: str = None + logistics_manager_mailer_id: str = None + + id: str = None + account_country_code: str = "US" + metadata: dict = {} + config: dict = {} @property def carrier_name(self): @@ -19,69 +25,15 @@ def carrier_name(self): @property def server_url(self): - return "https://api.usps.com" + return "https://secure.shippingapis.com/ShippingAPI.dll" @property def tracking_url(self): return "https://tools.usps.com/go/TrackConfirmAction?tLabels={}" - @property - def connection_config(self) -> lib.units.Options: - return lib.to_connection_config( - self.config or {}, - option_type=ConnectionConfig, - ) - - @property - def access_token(self): - """Retrieve the access_token using the client_id|client_secret pair - or collect it from the cache if an unexpired access_token exist. - """ - cache_key = f"{self.carrier_name}|{self.client_id}|{self.client_secret}" - now = datetime.datetime.now() + datetime.timedelta(minutes=30) - - auth = self.connection_cache.get(cache_key) or {} - token = auth.get("access_token") - expiry = lib.to_date(auth.get("expiry"), current_format="%Y-%m-%d %H:%M:%S") - - if token is not None and expiry is not None and expiry > now: - return token - - self.connection_cache.set(cache_key, lambda: login(self)) - new_auth = self.connection_cache.get(cache_key) - - return new_auth["access_token"] - - -def login(settings: Settings, client_id: str = None, client_secret: str = None): - import karrio.providers.usps.error as error - - result = lib.request( - url=f"{settings.server_url}/oauth2/v3/token", - method="POST", - headers={"content-Type": "application/x-www-form-urlencoded"}, - data=lib.to_query_string( - dict( - grant_type="client_credentials", - client_id=client_id, - client_secret=client_secret, - ) - ), - ) - - response = lib.to_dict(result) - messages = error.parse_error_response(response, settings) - - if any(messages): - raise errors.ShippingSDKError(messages) - - expiry = datetime.datetime.now() + datetime.timedelta( - seconds=float(response.get("expires_in", 0)) - ) - - return {**response, "expiry": lib.fdatetime(expiry)} +def parse_phone_number(number: str) -> typing.Optional[str]: + if number is None: + return None -class ConnectionConfig(lib.Enum): - shipping_options = lib.OptionEnum("shipping_options", list) - shipping_services = lib.OptionEnum("shipping_services", list) + return number.replace(" ", "").replace("-", "").replace("+", "")[-10:] diff --git a/modules/connectors/usps/karrio/schemas/usps/address_validate_request.py b/modules/connectors/usps/karrio/schemas/usps/address_validate_request.py new file mode 100644 index 0000000000..1e267c4b00 --- /dev/null +++ 
b/modules/connectors/usps/karrio/schemas/usps/address_validate_request.py @@ -0,0 +1,1563 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:38 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/address_validate_request.py') +# +# Command line arguments: +# ./schemas/AddressValidateRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/address_validate_request.py" ./schemas/AddressValidateRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+            optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class AddressValidateRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, Revision=None, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressValidateRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressValidateRequest.subclass: + return AddressValidateRequest.subclass(*args_, **kwargs_) + else: + return AddressValidateRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def has__content(self): + if ( + self.Revision is not None or + self.Address is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressValidateRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressValidateRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressValidateRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressValidateRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressValidateRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + self.Address.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address = obj_ + obj_.original_tagname_ = 'Address' +# end class AddressValidateRequest + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, FirmName=None, Address1=None, Address2=None, City=None, State=None, Urbanization=None, Zip5=None, Zip4=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = 
getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_Zip4(self): + return self.Zip4 + def set_Zip4(self, Zip4): + self.Zip4 = Zip4 + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.FirmName is not None or + self.Address1 is not None or + self.Address2 is not None or + self.City is not None or + self.State is not None or + self.Urbanization is not None or + self.Zip5 is not None or + self.Zip4 is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), 
namespaceprefix_ , eol_)) + if self.Address1 is not None: + namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip5), input_name='Zip5')), namespaceprefix_ , eol_)) + if self.Zip4 is not None: + namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip4), input_name='Zip4')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Address1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address1') + value_ = 
self.gds_validate_string(value_, node, 'Address1') + self.Address1 = value_ + self.Address1_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip5') + value_ = self.gds_validate_string(value_, node, 'Zip5') + self.Zip5 = value_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip4') + value_ = self.gds_validate_string(value_, node, 'Zip4') + self.Zip4 = value_ + self.Zip4_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from address_validate_request import *\n\n') + sys.stdout.write('import address_validate_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "AddressValidateRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/address_validate_response.py b/modules/connectors/usps/karrio/schemas/usps/address_validate_response.py new file mode 100644 index 0000000000..e9d6d1b416 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/address_validate_response.py @@ -0,0 +1,1722 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:38 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/address_validate_response.py') +# +# Command line arguments: +# ./schemas/AddressValidateResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/address_validate_response.py" ./schemas/AddressValidateResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
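The GeneratedsSuper helpers above handle ISO-8601 parsing and formatting through the small _FixedOffsetTZ wrapper. A minimal round-trip sketch follows; the import path karrio.schemas.usps.address_validate_response is inferred from the file listing and is an assumption, as is the absence of a local generatedssuper override module (this sketch is illustrative, not part of the generated code):

    import datetime

    import karrio.schemas.usps.address_validate_response as usps  # assumed path

    # gds_parse_datetime is a classmethod: an offset suffix (or 'Z') becomes a
    # _FixedOffsetTZ attached to the resulting datetime.
    dt = usps.GeneratedsSuper.gds_parse_datetime("2024-04-03T21:08:38-07:00")
    assert dt.utcoffset() == datetime.timedelta(hours=-7)

    # gds_format_datetime is inherited by every generated element class, e.g.
    # AddressValidateResponse defined further down in this module.
    text = usps.AddressValidateResponse().gds_format_datetime(dt)
    assert text == "2024-04-03T21:08:38-07:00"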
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class AddressValidateResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressValidateResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressValidateResponse.subclass: + return AddressValidateResponse.subclass(*args_, **kwargs_) + else: + return AddressValidateResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def has__content(self): + if ( + self.Address is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressValidateResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressValidateResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressValidateResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressValidateResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressValidateResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + 
eol_ = '\n' + else: + eol_ = '' + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + self.Address.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address = obj_ + obj_.original_tagname_ = 'Address' +# end class AddressValidateResponse + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, FirmName=None, Address1=None, Address2=None, Address2Abbreviation=None, City=None, CityAbbreviation=None, State=None, Urbanization=None, Zip5=None, Zip4=None, DeliveryPoint=None, CarrierRoute=None, Footnotes=None, DPVConfirmation=None, DPVCMRA=None, DPVFootnotes=None, Business=None, CentralDeliveryPoint=None, Vacant=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Address2Abbreviation = Address2Abbreviation + self.Address2Abbreviation_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.CityAbbreviation = CityAbbreviation + self.CityAbbreviation_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + self.DeliveryPoint = DeliveryPoint + self.DeliveryPoint_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + self.Footnotes = Footnotes + self.Footnotes_nsprefix_ = None + self.DPVConfirmation = DPVConfirmation + self.DPVConfirmation_nsprefix_ = None + self.DPVCMRA = DPVCMRA + self.DPVCMRA_nsprefix_ = None + self.DPVFootnotes = DPVFootnotes + self.DPVFootnotes_nsprefix_ = None + self.Business = Business + self.Business_nsprefix_ = None + self.CentralDeliveryPoint = CentralDeliveryPoint + self.CentralDeliveryPoint_nsprefix_ = None + self.Vacant = Vacant + self.Vacant_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def 
set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Address2Abbreviation(self): + return self.Address2Abbreviation + def set_Address2Abbreviation(self, Address2Abbreviation): + self.Address2Abbreviation = Address2Abbreviation + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_CityAbbreviation(self): + return self.CityAbbreviation + def set_CityAbbreviation(self, CityAbbreviation): + self.CityAbbreviation = CityAbbreviation + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_Zip4(self): + return self.Zip4 + def set_Zip4(self, Zip4): + self.Zip4 = Zip4 + def get_DeliveryPoint(self): + return self.DeliveryPoint + def set_DeliveryPoint(self, DeliveryPoint): + self.DeliveryPoint = DeliveryPoint + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def get_Footnotes(self): + return self.Footnotes + def set_Footnotes(self, Footnotes): + self.Footnotes = Footnotes + def get_DPVConfirmation(self): + return self.DPVConfirmation + def set_DPVConfirmation(self, DPVConfirmation): + self.DPVConfirmation = DPVConfirmation + def get_DPVCMRA(self): + return self.DPVCMRA + def set_DPVCMRA(self, DPVCMRA): + self.DPVCMRA = DPVCMRA + def get_DPVFootnotes(self): + return self.DPVFootnotes + def set_DPVFootnotes(self, DPVFootnotes): + self.DPVFootnotes = DPVFootnotes + def get_Business(self): + return self.Business + def set_Business(self, Business): + self.Business = Business + def get_CentralDeliveryPoint(self): + return self.CentralDeliveryPoint + def set_CentralDeliveryPoint(self, CentralDeliveryPoint): + self.CentralDeliveryPoint = CentralDeliveryPoint + def get_Vacant(self): + return self.Vacant + def set_Vacant(self, Vacant): + self.Vacant = Vacant + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.FirmName is not None or + self.Address1 is not None or + self.Address2 is not None or + self.Address2Abbreviation is not None or + self.City is not None or + self.CityAbbreviation is not None or + self.State is not None or + self.Urbanization is not None or + self.Zip5 is not None or + self.Zip4 is not None or + self.DeliveryPoint is not None or + self.CarrierRoute is not None or + self.Footnotes is not None or + self.DPVConfirmation is not None or + self.DPVCMRA is not None or + self.DPVFootnotes is not None or + self.Business is not None or + self.CentralDeliveryPoint is not None or + self.Vacant is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ 
== 'AddressType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.Address1 is not None: + namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Address2Abbreviation is not None: + namespaceprefix_ = self.Address2Abbreviation_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2Abbreviation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2Abbreviation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2Abbreviation), input_name='Address2Abbreviation')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.CityAbbreviation is not None: + namespaceprefix_ = self.CityAbbreviation_nsprefix_ + ':' if (UseCapturedNS_ and self.CityAbbreviation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCityAbbreviation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CityAbbreviation), input_name='CityAbbreviation')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and 
self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip5), input_name='Zip5')), namespaceprefix_ , eol_)) + if self.Zip4 is not None: + namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip4), input_name='Zip4')), namespaceprefix_ , eol_)) + if self.DeliveryPoint is not None: + namespaceprefix_ = self.DeliveryPoint_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryPoint_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryPoint>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryPoint), input_name='DeliveryPoint')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + if self.Footnotes is not None: + namespaceprefix_ = self.Footnotes_nsprefix_ + ':' if (UseCapturedNS_ and self.Footnotes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFootnotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Footnotes), input_name='Footnotes')), namespaceprefix_ , eol_)) + if self.DPVConfirmation is not None: + namespaceprefix_ = self.DPVConfirmation_nsprefix_ + ':' if (UseCapturedNS_ and self.DPVConfirmation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDPVConfirmation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DPVConfirmation), input_name='DPVConfirmation')), namespaceprefix_ , eol_)) + if self.DPVCMRA is not None: + namespaceprefix_ = self.DPVCMRA_nsprefix_ + ':' if (UseCapturedNS_ and self.DPVCMRA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDPVCMRA>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DPVCMRA), input_name='DPVCMRA')), namespaceprefix_ , eol_)) + if self.DPVFootnotes is not None: + namespaceprefix_ = self.DPVFootnotes_nsprefix_ + ':' if (UseCapturedNS_ and self.DPVFootnotes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDPVFootnotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DPVFootnotes), input_name='DPVFootnotes')), namespaceprefix_ , eol_)) + if self.Business is not 
None: + namespaceprefix_ = self.Business_nsprefix_ + ':' if (UseCapturedNS_ and self.Business_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBusiness>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Business), input_name='Business')), namespaceprefix_ , eol_)) + if self.CentralDeliveryPoint is not None: + namespaceprefix_ = self.CentralDeliveryPoint_nsprefix_ + ':' if (UseCapturedNS_ and self.CentralDeliveryPoint_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCentralDeliveryPoint>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CentralDeliveryPoint), input_name='CentralDeliveryPoint')), namespaceprefix_ , eol_)) + if self.Vacant is not None: + namespaceprefix_ = self.Vacant_nsprefix_ + ':' if (UseCapturedNS_ and self.Vacant_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVacant>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Vacant), input_name='Vacant')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Address1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address1') + value_ = self.gds_validate_string(value_, node, 'Address1') + self.Address1 = value_ + self.Address1_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2Abbreviation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2Abbreviation') + value_ = self.gds_validate_string(value_, node, 'Address2Abbreviation') + self.Address2Abbreviation = value_ + self.Address2Abbreviation_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'CityAbbreviation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CityAbbreviation') + value_ = self.gds_validate_string(value_, node, 'CityAbbreviation') + self.CityAbbreviation = value_ + self.CityAbbreviation_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = 
self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip5') + value_ = self.gds_validate_string(value_, node, 'Zip5') + self.Zip5 = value_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip4') + value_ = self.gds_validate_string(value_, node, 'Zip4') + self.Zip4 = value_ + self.Zip4_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryPoint': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryPoint') + value_ = self.gds_validate_string(value_, node, 'DeliveryPoint') + self.DeliveryPoint = value_ + self.DeliveryPoint_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix + elif nodeName_ == 'Footnotes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Footnotes') + value_ = self.gds_validate_string(value_, node, 'Footnotes') + self.Footnotes = value_ + self.Footnotes_nsprefix_ = child_.prefix + elif nodeName_ == 'DPVConfirmation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DPVConfirmation') + value_ = self.gds_validate_string(value_, node, 'DPVConfirmation') + self.DPVConfirmation = value_ + self.DPVConfirmation_nsprefix_ = child_.prefix + elif nodeName_ == 'DPVCMRA': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DPVCMRA') + value_ = self.gds_validate_string(value_, node, 'DPVCMRA') + self.DPVCMRA = value_ + self.DPVCMRA_nsprefix_ = child_.prefix + elif nodeName_ == 'DPVFootnotes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DPVFootnotes') + value_ = self.gds_validate_string(value_, node, 'DPVFootnotes') + self.DPVFootnotes = value_ + self.DPVFootnotes_nsprefix_ = child_.prefix + elif nodeName_ == 'Business': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Business') + value_ = self.gds_validate_string(value_, node, 'Business') + self.Business = value_ + self.Business_nsprefix_ = child_.prefix + elif nodeName_ == 'CentralDeliveryPoint': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CentralDeliveryPoint') + value_ = self.gds_validate_string(value_, node, 'CentralDeliveryPoint') + self.CentralDeliveryPoint = value_ + self.CentralDeliveryPoint_nsprefix_ = child_.prefix + elif nodeName_ == 'Vacant': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Vacant') + value_ = self.gds_validate_string(value_, node, 'Vacant') + self.Vacant = value_ + self.Vacant_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. 
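With the two element classes above in place, the module-level helpers defined just below (parse, parseString, parseEtree) turn raw XML into an object tree. A minimal consumption sketch, again assuming the karrio.schemas.usps.address_validate_response import path; the XML payload is illustrative only, not a real USPS response:

    import karrio.schemas.usps.address_validate_response as usps  # assumed path

    xml = (
        '<AddressValidateResponse>'
        '<Address ID="0">'
        '<Address2>123 MAIN ST</Address2>'
        '<City>ANYTOWN</City>'
        '<State>VA</State>'
        '<Zip5>12345</Zip5>'
        '<Zip4>6789</Zip4>'
        '</Address>'
        '</AddressValidateResponse>'
    )
    # silence=True returns the object tree without re-exporting it to stdout.
    response = usps.parseString(xml, silence=True)
    address = response.get_Address()
    print(address.get_City(), address.get_State(), address.get_Zip5(), address.get_Zip4())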
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. 
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from address_validate_response import *\n\n') + sys.stdout.write('import address_validate_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "AddressValidateResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_request.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_request.py new file mode 100644 index 0000000000..14f092fec6 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_request.py @@ -0,0 +1,1471 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:38 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_availability_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupAvailabilityRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_availability_request.py" ./schemas/CarrierPickupAvailabilityRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupAvailabilityRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Date=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupAvailabilityRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupAvailabilityRequest.subclass: + return CarrierPickupAvailabilityRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupAvailabilityRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Date(self): + return self.Date + def 
set_Date(self, Date): + self.Date = Date + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Date is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupAvailabilityRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupAvailabilityRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupAvailabilityRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupAvailabilityRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupAvailabilityRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + 
self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix +# end class CarrierPickupAvailabilityRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_availability_request import *\n\n') + sys.stdout.write('import carrier_pickup_availability_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupAvailabilityRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_response.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_response.py new file mode 100644 index 0000000000..30906295b8 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_availability_response.py @@ -0,0 +1,1481 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:38 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_availability_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupAvailabilityResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_availability_response.py" ./schemas/CarrierPickupAvailabilityResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupAvailabilityResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, DayOfWeek=None, Date=None, CarrierRoute=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupAvailabilityResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupAvailabilityResponse.subclass: + return CarrierPickupAvailabilityResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupAvailabilityResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_DayOfWeek(self): + return 
self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def has__content(self): + if ( + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.CarrierRoute is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupAvailabilityResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupAvailabilityResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupAvailabilityResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupAvailabilityResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupAvailabilityResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ 
= self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix +# end class CarrierPickupAvailabilityResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
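    Example (illustrative namespace URI): for a document whose elements
    declare xmlns:ns1="http://example.com/ns1", this returns

        ({'ns1': 'http://example.com/ns1'},
         'xmlns:ns1="http://example.com/ns1"')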
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
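    Example (illustrative values only):

        obj = parseString(
            '<CarrierPickupAvailabilityResponse>'
            '<City>KNOXVILLE</City><State>TN</State>'
            '</CarrierPickupAvailabilityResponse>',
            silence=True)
        assert obj.get_City() == 'KNOXVILLE'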
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_availability_response import *\n\n') + sys.stdout.write('import carrier_pickup_availability_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupAvailabilityResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_request.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_request.py new file mode 100644 index 0000000000..4ac0b35c17 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_request.py @@ -0,0 +1,1471 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:39 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_cancel_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupCancelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_cancel_request.py" ./schemas/CarrierPickupCancelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
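        # Recurse into the parent element so the element path is collected
        # leaf-first; get_path_ reverses the list before joining with '/'.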
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
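# Illustrative usage sketch (not generated output; all field values below are
# placeholders): a CarrierPickupCancelRequest can be populated through its
# constructor and serialized with export() into any writable text stream.

def _example_cancel_request_xml():
    import io
    request = CarrierPickupCancelRequest(
        USERID="XXXX", PASSWORD="XXXX",  # credentials are placeholders
        FirmName="ACME", Address2="123 Main St",
        City="Memphis", State="TN", ZIP5="38101",
        ConfirmationNumber="WTC123456789")
    buffer = io.StringIO()
    request.export(buffer, 0, name_='CarrierPickupCancelRequest')
    return buffer.getvalue()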
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupCancelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, ConfirmationNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupCancelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupCancelRequest.subclass: + return CarrierPickupCancelRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupCancelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def 
get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.ConfirmationNumber is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupCancelRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupCancelRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupCancelRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupCancelRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupCancelRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = 
child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix +# end class CarrierPickupCancelRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_cancel_request import *\n\n') + sys.stdout.write('import carrier_pickup_cancel_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupCancelRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_response.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_response.py new file mode 100644 index 0000000000..beb3b47905 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_cancel_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:39 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_cancel_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupCancelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_cancel_response.py" ./schemas/CarrierPickupCancelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+            optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupCancelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, CarrierRoute=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupCancelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupCancelResponse.subclass: + return CarrierPickupCancelResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupCancelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + def 
set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.CarrierRoute is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupCancelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupCancelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupCancelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupCancelResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupCancelResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + 
self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix +# end class CarrierPickupCancelResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_cancel_response import *\n\n') + sys.stdout.write('import carrier_pickup_cancel_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupCancelResponse", + "PackageType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_request.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_request.py new file mode 100644 index 0000000000..026c92692f --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_request.py @@ -0,0 +1,1744 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:39 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_change_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupChangeRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_change_request.py" ./schemas/CarrierPickupChangeRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupChangeRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, EmailAddress=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.EmailAddress = EmailAddress + self.EmailAddress_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupChangeRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupChangeRequest.subclass: + return CarrierPickupChangeRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupChangeRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return 
self.FirstName + def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_EmailAddress(self): + return self.EmailAddress + def set_EmailAddress(self, EmailAddress): + self.EmailAddress = EmailAddress + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.EmailAddress is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupChangeRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = 
'' + if self.original_tagname_ is not None and name_ == 'CarrierPickupChangeRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupChangeRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupChangeRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupChangeRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and 
self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.EmailAddress is not None: + namespaceprefix_ = self.EmailAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.EmailAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailAddress), input_name='EmailAddress')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = 
self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailAddress') + value_ = self.gds_validate_string(value_, node, 'EmailAddress') + self.EmailAddress = value_ + self.EmailAddress_nsprefix_ = child_.prefix +# end class CarrierPickupChangeRequest + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, 
PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = 
self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_change_request import *\n\n') + sys.stdout.write('import carrier_pickup_change_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupChangeRequest", + "PackageType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_response.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_response.py new file mode 100644 index 0000000000..132ca8b438 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_change_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:39 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_change_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupChangeResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_change_response.py" ./schemas/CarrierPickupChangeResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupChangeResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, Status=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupChangeResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupChangeResponse.subclass: + return CarrierPickupChangeResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupChangeResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + def set_FirstName(self, 
FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.Status is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupChangeResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if 
self.original_tagname_ is not None and name_ == 'CarrierPickupChangeResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupChangeResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupChangeResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupChangeResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.Status is not None: + namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatus>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = 
child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'Status': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Status') + value_ = self.gds_validate_string(value_, node, 'Status') + self.Status = value_ + self.Status_nsprefix_ = child_.prefix +# end class CarrierPickupChangeResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = 
kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        self.ServiceType = ServiceType
+        self.ServiceType_nsprefix_ = None
+        self.Count = Count
+        self.Count_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, PackageType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if PackageType.subclass:
+            return PackageType.subclass(*args_, **kwargs_)
+        else:
+            return PackageType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_ServiceType(self):
+        return self.ServiceType
+    def set_ServiceType(self, ServiceType):
+        self.ServiceType = ServiceType
+    def get_Count(self):
+        return self.Count
+    def set_Count(self, Count):
+        self.Count = Count
+    def has__content(self):
+        if (
+            self.ServiceType is not None or
+            self.Count is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'PackageType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.ServiceType is not None:
+            namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sServiceType>%s</%sServiceType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_))
+        if self.Count is not None:
+            namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCount>%s</%sCount>%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def
_buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_change_response import *\n\n') + sys.stdout.write('import carrier_pickup_change_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupChangeResponse", + "PackageType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_request.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_request.py new file mode 100644 index 0000000000..12d64d2011 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_request.py @@ -0,0 +1,1471 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:39 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_inquiry_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupInquiryRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_inquiry_request.py" ./schemas/CarrierPickupInquiryRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupInquiryRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, ConfirmationNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupInquiryRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupInquiryRequest.subclass: + return CarrierPickupInquiryRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupInquiryRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def 
get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.ConfirmationNumber is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupInquiryRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupInquiryRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupInquiryRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupInquiryRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupInquiryRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) 
else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix +# end class CarrierPickupInquiryRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_inquiry_request import *\n\n') + sys.stdout.write('import carrier_pickup_inquiry_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupInquiryRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_response.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_response.py new file mode 100644 index 0000000000..f003fb276b --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_inquiry_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:40 2024 by generateDS.py version 2.43.3. 
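For context, a minimal usage sketch (not part of the generated file) of the carrier_pickup_inquiry_request module shown above: the generated class is populated through its constructor and serialized with export(), or rebuilt from an XML string with parseString(). The field values and the credential placeholder are illustrative, and the import path assumes the karrio/ directory in this patch is the package root.

import sys
from karrio.schemas.usps.carrier_pickup_inquiry_request import (
    CarrierPickupInquiryRequest,
    parseString,
)

# Build a request object and serialize it; USERID/PASSWORD are exported as XML
# attributes, the remaining fields as child elements (see _exportAttributes /
# _exportChildren above).
request = CarrierPickupInquiryRequest(
    USERID="XXXXXXXX",            # placeholder credential
    FirmName="ACME Widgets",
    Address2="1234 Main St",
    City="Houston",
    State="TX",
    ZIP5="77058",
    ConfirmationNumber="WTC123456789",
)
request.export(sys.stdout, 0, name_="CarrierPickupInquiryRequest")

# Round trip: parseString() rebuilds the object tree from an XML string
# (silence=True suppresses the re-export to stdout).
xml = '<CarrierPickupInquiryRequest USERID="XXXXXXXX"><City>Houston</City></CarrierPickupInquiryRequest>'
obj = parseString(xml, silence=True)
print(obj.get_City())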
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_inquiry_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupInquiryResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_inquiry_response.py" ./schemas/CarrierPickupInquiryResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
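As a rough illustration of the CurrentSubclassModule_ / getSubclassFromModule_ hook defined above (assumed usage, not part of the generated module): pointing CurrentSubclassModule_ at a module that defines a class named "<ClassName>Sub" makes every factory() call return that subclass instead of the generated base class. The subclass body below and the import path are hypothetical.

import sys
import karrio.schemas.usps.carrier_pickup_inquiry_response as supermod

class CarrierPickupInquiryResponseSub(supermod.CarrierPickupInquiryResponse):
    # hypothetical convenience method layered over the generated getters
    def summary(self):
        return "%s scheduled for %s" % (self.get_ConfirmationNumber(), self.get_Date())

# getSubclassFromModule_ looks up "<ClassName>Sub" in the registered module,
# so factory() now returns the subclass defined here.
supermod.CurrentSubclassModule_ = sys.modules[__name__]
response = supermod.CarrierPickupInquiryResponse.factory()
assert isinstance(response, CarrierPickupInquiryResponseSub)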
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupInquiryResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, EmailAddress=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.EmailAddress = EmailAddress + self.EmailAddress_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupInquiryResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupInquiryResponse.subclass: + return CarrierPickupInquiryResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupInquiryResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + 
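For the response class whose remaining accessors follow, a brief sketch of feeding raw XML into the generated build() method and reading it back through the getters. The XML payload and values are invented, and the snippet parses the string with lxml directly rather than going through the module-level parse helpers.

from lxml import etree
from karrio.schemas.usps.carrier_pickup_inquiry_response import CarrierPickupInquiryResponse

xml = b"""<CarrierPickupInquiryResponse>
  <FirstName>John</FirstName>
  <LastName>Doe</LastName>
  <ConfirmationNumber>WTC123456789</ConfirmationNumber>
  <Package><ServiceType>PriorityMail</ServiceType><Count>2</Count></Package>
  <Date>2024-04-05</Date>
</CarrierPickupInquiryResponse>"""

# build() walks the element tree and populates the typed fields;
# repeated <Package> elements accumulate as a list of PackageType objects.
response = CarrierPickupInquiryResponse()
response.build(etree.fromstring(xml))

print(response.get_ConfirmationNumber())      # "WTC123456789"
print(response.get_Date())                    # "2024-04-05"
for package in response.get_Package():
    print(package.get_ServiceType(), package.get_Count())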
def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_EmailAddress(self): + return self.EmailAddress + def set_EmailAddress(self, EmailAddress): + self.EmailAddress = EmailAddress + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.EmailAddress is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupInquiryResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupInquiryResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupInquiryResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupInquiryResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupInquiryResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.EmailAddress is not None: + namespaceprefix_ = self.EmailAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.EmailAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailAddress), input_name='EmailAddress')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + 
self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailAddress') + value_ = self.gds_validate_string(value_, node, 'EmailAddress') + self.EmailAddress = value_ + self.EmailAddress_nsprefix_ = child_.prefix +# end class CarrierPickupInquiryResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = 
gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space 
used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_inquiry_response import *\n\n') + sys.stdout.write('import carrier_pickup_inquiry_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
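# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the generateDS output above).  It shows
# one way the parseString() helper and the generated accessors defined earlier
# in this module might be exercised; the element values are invented purely
# for illustration, and the accessor names assume the usual get_<Element>()
# pattern generateDS emits for this schema.
# ---------------------------------------------------------------------------
def _example_read_inquiry_response():
    sample = (
        '<CarrierPickupInquiryResponse>'
        '<FirstName>Jane</FirstName>'
        '<LastName>Doe</LastName>'
        '<ConfirmationNumber>WTC123456789</ConfirmationNumber>'
        '<Package><ServiceType>PriorityMail</ServiceType><Count>2</Count></Package>'
        '</CarrierPickupInquiryResponse>'
    )
    # silence=True builds the object tree without re-exporting it to stdout.
    response = parseString(sample, silence=True)
    packages = [(p.get_ServiceType(), p.get_Count()) for p in response.get_Package()]
    return response.get_ConfirmationNumber(), packages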
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupInquiryResponse", + "PackageType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_request.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_request.py new file mode 100644 index 0000000000..bde487dfe5 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_request.py @@ -0,0 +1,1727 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:40 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_schedule_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupScheduleRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_schedule_request.py" ./schemas/CarrierPickupScheduleRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
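# ---------------------------------------------------------------------------
# Editorial sketch (not generateDS output): parsexml_() and parsexmlstring_()
# above accept an explicit lxml parser, so a caller can tighten parsing
# behaviour before building the object tree.  The parser options chosen here
# are illustrative, not required by this schema.
# ---------------------------------------------------------------------------
def _example_parse_with_hardened_parser(xml_path):
    # Disable entity expansion and network access while keeping lxml's
    # ElementTree-compatible parsing used elsewhere in this module.
    hardened = etree_.XMLParser(resolve_entities=False, no_network=True)
    return parsexml_(xml_path, parser=hardened)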
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
        self.get_path_list_(node.getparent(), path_list)
+    def get_class_obj_(self, node, default_class=None):
+        class_obj1 = default_class
+        if 'xsi' in node.nsmap:
+            classname = node.get('{%s}type' % node.nsmap['xsi'])
+            if classname is not None:
+                names = classname.split(':')
+                if len(names) == 2:
+                    classname = names[1]
+                class_obj2 = globals().get(classname)
+                if class_obj2 is not None:
+                    class_obj1 = class_obj2
+        return class_obj1
+    def gds_build_any(self, node, type_name=None):
+        # provide default value in case option --disable-xml is used.
+        content = ""
+        content = etree_.tostring(node, encoding="unicode")
+        return content
+    @classmethod
+    def gds_reverse_node_mapping(cls, mapping):
+        return dict(((v, k) for k, v in mapping.items()))
+    @staticmethod
+    def gds_encode(instring):
+        if sys.version_info.major == 2:
+            if ExternalEncoding:
+                encoding = ExternalEncoding
+            else:
+                encoding = 'utf-8'
+            return instring.encode(encoding)
+        else:
+            return instring
+    @staticmethod
+    def convert_unicode(instring):
+        if isinstance(instring, str):
+            result = quote_xml(instring)
+        elif sys.version_info.major == 2 and isinstance(instring, unicode):
+            result = quote_xml(instring).encode('utf8')
+        else:
+            result = GeneratedsSuper.gds_encode(str(instring))
+        return result
+    def __eq__(self, other):
+        def excl_select_objs_(obj):
+            return (obj[0] != 'parent_object_' and
+                    obj[0] != 'gds_collector_')
+        if type(self) != type(other):
+            return False
+        return all(x == y for x, y in zip_longest(
+            filter(excl_select_objs_, self.__dict__.items()),
+            filter(excl_select_objs_, other.__dict__.items())))
+    def __ne__(self, other):
+        return not self.__eq__(other)
+    # Django ETL transform hooks.
+    def gds_djo_etl_transform(self):
+        pass
+    def gds_djo_etl_transform_db_obj(self, dbobj):
+        pass
+    # SQLAlchemy ETL transform hooks.
+    def gds_sqa_etl_transform(self):
+        return 0, None
+    def gds_sqa_etl_transform_db_obj(self, dbobj):
+        pass
+    def gds_get_node_lineno_(self):
+        if (hasattr(self, "gds_elementtree_node_") and
+                self.gds_elementtree_node_ is not None):
+            return ' near line {}'.format(
+                self.gds_elementtree_node_.sourceline)
+        else:
+            return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
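# ---------------------------------------------------------------------------
# Editorial sketch (not generateDS output): the factory() methods defined
# further down consult CurrentSubclassModule_ via getSubclassFromModule_(),
# looking for classes named "<GeneratedClassName>Sub".  A hypothetical
# override module could be wired in like this before parsing.
# ---------------------------------------------------------------------------
def _example_enable_subclass_overrides(override_module):
    # override_module is assumed to define e.g. CarrierPickupScheduleRequestSub.
    global CurrentSubclassModule_
    CurrentSubclassModule_ = override_module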
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupScheduleRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, EmailAddress=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.EmailAddress = EmailAddress + self.EmailAddress_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupScheduleRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupScheduleRequest.subclass: + return CarrierPickupScheduleRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupScheduleRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + def set_FirstName(self, FirstName): + self.FirstName = FirstName + def 
get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_EmailAddress(self): + return self.EmailAddress + def set_EmailAddress(self, EmailAddress): + self.EmailAddress = EmailAddress + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.EmailAddress is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupScheduleRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupScheduleRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, 
name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupScheduleRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupScheduleRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupScheduleRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + 
namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.EmailAddress is not None: + namespaceprefix_ = self.EmailAddress_nsprefix_ + ':' 
if (UseCapturedNS_ and self.EmailAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailAddress), input_name='EmailAddress')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 
= value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailAddress') + value_ = self.gds_validate_string(value_, node, 'EmailAddress') + self.EmailAddress = value_ + self.EmailAddress_nsprefix_ = child_.prefix +# end class CarrierPickupScheduleRequest + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if 
self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_schedule_request import *\n\n') + sys.stdout.write('import carrier_pickup_schedule_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupScheduleRequest", + "PackageType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_response.py b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_response.py new file mode 100644 index 0000000000..476a92287f --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/carrier_pickup_schedule_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:40 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/carrier_pickup_schedule_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupScheduleResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/carrier_pickup_schedule_response.py" ./schemas/CarrierPickupScheduleResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupScheduleResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, CarrierRoute=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupScheduleResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupScheduleResponse.subclass: + return CarrierPickupScheduleResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupScheduleResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName 
+ def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.CarrierRoute is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupScheduleResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupScheduleResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupScheduleResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupScheduleResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupScheduleResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + 
self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix +# end class CarrierPickupScheduleResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = 
gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space 
used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_schedule_response import *\n\n') + sys.stdout.write('import carrier_pickup_schedule_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupScheduleResponse", + "PackageType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/city_state_lookup_request.py b/modules/connectors/usps/karrio/schemas/usps/city_state_lookup_request.py new file mode 100644 index 0000000000..59fc283d00 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/city_state_lookup_request.py @@ -0,0 +1,1440 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:40 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/city_state_lookup_request.py') +# +# Command line arguments: +# ./schemas/CityStateLookupRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/city_state_lookup_request.py" ./schemas/CityStateLookupRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CityStateLookupRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, ZipCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.ZipCode = ZipCode + self.ZipCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CityStateLookupRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CityStateLookupRequest.subclass: + return CityStateLookupRequest.subclass(*args_, **kwargs_) + else: + return CityStateLookupRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ZipCode(self): + return self.ZipCode + def set_ZipCode(self, ZipCode): + self.ZipCode = ZipCode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.ZipCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CityStateLookupRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CityStateLookupRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CityStateLookupRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CityStateLookupRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CityStateLookupRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ZipCode is not None: + namespaceprefix_ = self.ZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipCode_nsprefix_) else '' + self.ZipCode.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ZipCode', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ZipCode': + obj_ = ZipCodeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ZipCode = obj_ + obj_.original_tagname_ = 'ZipCode' +# end class CityStateLookupRequest + + +class ZipCodeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Zip5=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeType.subclass: + return ZipCodeType.subclass(*args_, **kwargs_) + else: + return ZipCodeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Zip5 is not None + ): + return True + else: + return False + def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='ZipCodeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ZipCodeType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Zip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zip5') + ival_ = self.gds_validate_integer(ival_, node, 'Zip5') + self.Zip5 = ival_ + self.Zip5_nsprefix_ = child_.prefix +# end class ZipCodeType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from city_state_lookup_request import *\n\n') + sys.stdout.write('import city_state_lookup_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CityStateLookupRequest", + "ZipCodeType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/city_state_lookup_response.py b/modules/connectors/usps/karrio/schemas/usps/city_state_lookup_response.py new file mode 100644 index 0000000000..e74c048433 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/city_state_lookup_response.py @@ -0,0 +1,1459 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:40 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/city_state_lookup_response.py') +# +# Command line arguments: +# ./schemas/CityStateLookupResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/city_state_lookup_response.py" ./schemas/CityStateLookupResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CityStateLookupResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ZipCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ZipCode is None: + self.ZipCode = [] + else: + self.ZipCode = ZipCode + self.ZipCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CityStateLookupResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CityStateLookupResponse.subclass: + return CityStateLookupResponse.subclass(*args_, **kwargs_) + else: + return CityStateLookupResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ZipCode(self): + return self.ZipCode + def set_ZipCode(self, ZipCode): + self.ZipCode = ZipCode + def add_ZipCode(self, value): + self.ZipCode.append(value) + def insert_ZipCode_at(self, index, value): + self.ZipCode.insert(index, value) + def replace_ZipCode_at(self, index, value): + self.ZipCode[index] = value + def has__content(self): + if ( + self.ZipCode + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CityStateLookupResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CityStateLookupResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CityStateLookupResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CityStateLookupResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='CityStateLookupResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ZipCode_ in self.ZipCode: + namespaceprefix_ = self.ZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipCode_nsprefix_) else '' + ZipCode_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ZipCode', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ZipCode': + obj_ = ZipCodeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ZipCode.append(obj_) + obj_.original_tagname_ = 'ZipCode' +# end class CityStateLookupResponse + + +class ZipCodeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Zip5=None, City=None, State=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeType.subclass: + return ZipCodeType.subclass(*args_, **kwargs_) + else: + return ZipCodeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Zip5 is not None or + self.City is not None or + self.State is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, 
name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ZipCodeType'):
+        if self.ID is not None and 'ID' not in already_processed:
+            already_processed.add('ID')
+            outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID'))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Zip5 is not None:
+            namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sZip5>%s</%sZip5>%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_))
+        if self.City is not None:
+            namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCity>%s</%sCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_))
+        if self.State is not None:
+            namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sState>%s</%sState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        value = find_attr_value_('ID', node)
+        if value is not None and 'ID' not in already_processed:
+            already_processed.add('ID')
+            self.ID = self.gds_parse_integer(value, node, 'ID')
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'Zip5' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'Zip5')
+            ival_ = self.gds_validate_integer(ival_, node, 'Zip5')
+            self.Zip5 = ival_
+            self.Zip5_nsprefix_ = child_.prefix
+        elif nodeName_ == 'City':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'City')
+            value_ = self.gds_validate_string(value_, node, 'City')
+            self.City = value_
+            self.City_nsprefix_ = child_.prefix
+        elif nodeName_ == 'State':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'State')
+            value_ = self.gds_validate_string(value_, node, 'State')
+            self.State = value_
+            self.State_nsprefix_ = child_.prefix
+# end class ZipCodeType
+
+
+#
+# End data representation classes.
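For orientation, the classes above are normally consumed through the module-level parse helpers generated further down in this file rather than instantiated by hand. A minimal usage sketch follows; it is illustrative only and not part of the generated module or of this patch. The import path is assumed from the patch's modules/connectors/usps/karrio/schemas/usps/ layout, and the sample XML values are made up.

    # Hypothetical usage of the generated CityStateLookup bindings (not part of this patch).
    from karrio.schemas.usps import city_state_lookup_response as lookup  # assumed import path

    SAMPLE_XML = (
        '<CityStateLookupResponse>'
        '<ZipCode ID="0"><Zip5>20770</Zip5><City>GREENBELT</City><State>MD</State></ZipCode>'
        '</CityStateLookupResponse>'
    )

    # parseString (defined later in this module) builds the object tree from a string;
    # silence=True suppresses re-exporting the tree to stdout.
    response = lookup.parseString(SAMPLE_XML, silence=True)
    for zip_code in response.get_ZipCode():
        print(zip_code.get_Zip5(), zip_code.get_City(), zip_code.get_State())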
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. 
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from city_state_lookup_response import *\n\n') + sys.stdout.write('import city_state_lookup_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CityStateLookupResponse", + "ZipCodeType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/emrsv4_0_bulk_request.py b/modules/connectors/usps/karrio/schemas/usps/emrsv4_0_bulk_request.py new file mode 100644 index 0000000000..4037706a11 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/emrsv4_0_bulk_request.py @@ -0,0 +1,1846 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:41 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/emrsv4_0_bulk_request.py') +# +# Command line arguments: +# ./schemas/EMRSV4.0BulkRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/emrsv4_0_bulk_request.py" ./schemas/EMRSV4.0BulkRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
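
As a quick illustration of the helpers above, the following hypothetical snippet (not part of the patch; the import path is assumed from the file's location in this diff, modules/connectors/usps/karrio/schemas/usps/emrsv4_0_bulk_request.py) shows the namespace-stripping pattern used by build() and get_root_tag(), and the ISO 8601 round-trip performed by the datetime helpers:

```python
# Hypothetical usage sketch; module path is an assumption.
from karrio.schemas.usps.emrsv4_0_bulk_request import GeneratedsSuper, Tag_pattern_

# Tag_pattern_ is what build() and get_root_tag() use to drop "{namespace}" prefixes
# from lxml element tags.
assert Tag_pattern_.match("{http://example.com/ns}LabelCount").groups()[-1] == "LabelCount"

# The datetime helpers round-trip ISO 8601 values, including the trailing "Z"
# (and "+HH:MM"/"-HH:MM" offsets) handled via _FixedOffsetTZ.
dt = GeneratedsSuper.gds_parse_datetime("2024-04-03T21:08:41Z")
assert GeneratedsSuper().gds_format_datetime(dt) == "2024-04-03T21:08:41Z"
```
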
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class EMRSV4_0BulkRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, LabelCount=None, ImageParameters=None, RetailerName=None, RetailerAddress=None, PermitNumber=None, PermitIssuingPOCity=None, PermitIssuingPOState=None, PermitIssuingPOZip5=None, PDUFirmName=None, PDUPOBox=None, PDUCity=None, PDUState=None, PDUZip5=None, PDUZip4=None, ServiceType=None, DeliveryConfirmation=None, InsuranceValue=None, MailingAckPackageID=None, WeightInPounds=None, WeightInOunces=None, RMA=None, RMAPICFlag=None, ImageType=None, RMABarcode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.LabelCount = LabelCount + self.LabelCount_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.RetailerName = RetailerName + self.RetailerName_nsprefix_ = None + self.RetailerAddress = RetailerAddress + self.RetailerAddress_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.PermitIssuingPOCity = PermitIssuingPOCity + self.PermitIssuingPOCity_nsprefix_ = None + self.PermitIssuingPOState = PermitIssuingPOState + self.PermitIssuingPOState_nsprefix_ = None + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + self.PermitIssuingPOZip5_nsprefix_ = None + self.PDUFirmName = PDUFirmName + self.PDUFirmName_nsprefix_ = None + self.PDUPOBox = PDUPOBox + self.PDUPOBox_nsprefix_ = None + self.PDUCity = PDUCity + self.PDUCity_nsprefix_ = None + self.PDUState = PDUState + self.PDUState_nsprefix_ = None + self.PDUZip5 = PDUZip5 + self.PDUZip5_nsprefix_ = None + self.PDUZip4 = PDUZip4 + self.PDUZip4_nsprefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.DeliveryConfirmation = DeliveryConfirmation + self.DeliveryConfirmation_nsprefix_ = None + self.InsuranceValue = InsuranceValue + self.InsuranceValue_nsprefix_ = None + self.MailingAckPackageID = MailingAckPackageID + self.MailingAckPackageID_nsprefix_ = None + self.WeightInPounds = WeightInPounds + self.WeightInPounds_nsprefix_ = None + self.WeightInOunces = WeightInOunces + self.WeightInOunces_nsprefix_ = None + self.RMA = RMA + self.RMA_nsprefix_ = None + self.RMAPICFlag = 
RMAPICFlag + self.RMAPICFlag_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.RMABarcode = RMABarcode + self.RMABarcode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, EMRSV4_0BulkRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EMRSV4_0BulkRequest.subclass: + return EMRSV4_0BulkRequest.subclass(*args_, **kwargs_) + else: + return EMRSV4_0BulkRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_LabelCount(self): + return self.LabelCount + def set_LabelCount(self, LabelCount): + self.LabelCount = LabelCount + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_RetailerName(self): + return self.RetailerName + def set_RetailerName(self, RetailerName): + self.RetailerName = RetailerName + def get_RetailerAddress(self): + return self.RetailerAddress + def set_RetailerAddress(self, RetailerAddress): + self.RetailerAddress = RetailerAddress + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_PermitIssuingPOCity(self): + return self.PermitIssuingPOCity + def set_PermitIssuingPOCity(self, PermitIssuingPOCity): + self.PermitIssuingPOCity = PermitIssuingPOCity + def get_PermitIssuingPOState(self): + return self.PermitIssuingPOState + def set_PermitIssuingPOState(self, PermitIssuingPOState): + self.PermitIssuingPOState = PermitIssuingPOState + def get_PermitIssuingPOZip5(self): + return self.PermitIssuingPOZip5 + def set_PermitIssuingPOZip5(self, PermitIssuingPOZip5): + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + def get_PDUFirmName(self): + return self.PDUFirmName + def set_PDUFirmName(self, PDUFirmName): + self.PDUFirmName = PDUFirmName + def get_PDUPOBox(self): + return self.PDUPOBox + def set_PDUPOBox(self, PDUPOBox): + self.PDUPOBox = PDUPOBox + def get_PDUCity(self): + return self.PDUCity + def set_PDUCity(self, PDUCity): + self.PDUCity = PDUCity + def get_PDUState(self): + return self.PDUState + def set_PDUState(self, PDUState): + self.PDUState = PDUState + def get_PDUZip5(self): + return self.PDUZip5 + def set_PDUZip5(self, PDUZip5): + self.PDUZip5 = PDUZip5 + def get_PDUZip4(self): + return self.PDUZip4 + def set_PDUZip4(self, PDUZip4): + self.PDUZip4 = PDUZip4 + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_DeliveryConfirmation(self): + return self.DeliveryConfirmation + def set_DeliveryConfirmation(self, DeliveryConfirmation): + self.DeliveryConfirmation = DeliveryConfirmation + def get_InsuranceValue(self): + return self.InsuranceValue + def set_InsuranceValue(self, InsuranceValue): + self.InsuranceValue = InsuranceValue + def get_MailingAckPackageID(self): + return self.MailingAckPackageID + def set_MailingAckPackageID(self, MailingAckPackageID): + self.MailingAckPackageID = MailingAckPackageID + def get_WeightInPounds(self): + return self.WeightInPounds + def set_WeightInPounds(self, WeightInPounds): + self.WeightInPounds = WeightInPounds + def get_WeightInOunces(self): + return 
self.WeightInOunces + def set_WeightInOunces(self, WeightInOunces): + self.WeightInOunces = WeightInOunces + def get_RMA(self): + return self.RMA + def set_RMA(self, RMA): + self.RMA = RMA + def get_RMAPICFlag(self): + return self.RMAPICFlag + def set_RMAPICFlag(self, RMAPICFlag): + self.RMAPICFlag = RMAPICFlag + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_RMABarcode(self): + return self.RMABarcode + def set_RMABarcode(self, RMABarcode): + self.RMABarcode = RMABarcode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.LabelCount is not None or + self.ImageParameters is not None or + self.RetailerName is not None or + self.RetailerAddress is not None or + self.PermitNumber is not None or + self.PermitIssuingPOCity is not None or + self.PermitIssuingPOState is not None or + self.PermitIssuingPOZip5 is not None or + self.PDUFirmName is not None or + self.PDUPOBox is not None or + self.PDUCity is not None or + self.PDUState is not None or + self.PDUZip5 is not None or + self.PDUZip4 is not None or + self.ServiceType is not None or + self.DeliveryConfirmation is not None or + self.InsuranceValue is not None or + self.MailingAckPackageID is not None or + self.WeightInPounds is not None or + self.WeightInOunces is not None or + self.RMA is not None or + self.RMAPICFlag is not None or + self.ImageType is not None or + self.RMABarcode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EMRSV4.0BulkRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('EMRSV4.0BulkRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'EMRSV4.0BulkRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EMRSV4.0BulkRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EMRSV4.0BulkRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EMRSV4.0BulkRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EMRSV4.0BulkRequest', fromsubclass_=False, 
pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.LabelCount is not None: + namespaceprefix_ = self.LabelCount_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelCount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.LabelCount, input_name='LabelCount'), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.RetailerName is not None: + namespaceprefix_ = self.RetailerName_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailerName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerName), input_name='RetailerName')), namespaceprefix_ , eol_)) + if self.RetailerAddress is not None: + namespaceprefix_ = self.RetailerAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailerAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerAddress), input_name='RetailerAddress')), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitNumber, input_name='PermitNumber'), namespaceprefix_ , eol_)) + if self.PermitIssuingPOCity is not None: + namespaceprefix_ = self.PermitIssuingPOCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOCity), input_name='PermitIssuingPOCity')), namespaceprefix_ , eol_)) + if self.PermitIssuingPOState is not None: + namespaceprefix_ = self.PermitIssuingPOState_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOState), input_name='PermitIssuingPOState')), namespaceprefix_ , eol_)) + if self.PermitIssuingPOZip5 is not None: + namespaceprefix_ = self.PermitIssuingPOZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitIssuingPOZip5, input_name='PermitIssuingPOZip5'), namespaceprefix_ , eol_)) + if self.PDUFirmName is not None: + namespaceprefix_ = self.PDUFirmName_nsprefix_ + ':' if (UseCapturedNS_ and 
self.PDUFirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUFirmName), input_name='PDUFirmName')), namespaceprefix_ , eol_)) + if self.PDUPOBox is not None: + namespaceprefix_ = self.PDUPOBox_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUPOBox_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUPOBox>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUPOBox), input_name='PDUPOBox')), namespaceprefix_ , eol_)) + if self.PDUCity is not None: + namespaceprefix_ = self.PDUCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUCity), input_name='PDUCity')), namespaceprefix_ , eol_)) + if self.PDUState is not None: + namespaceprefix_ = self.PDUState_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUState), input_name='PDUState')), namespaceprefix_ , eol_)) + if self.PDUZip5 is not None: + namespaceprefix_ = self.PDUZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip5, input_name='PDUZip5'), namespaceprefix_ , eol_)) + if self.PDUZip4 is not None: + namespaceprefix_ = self.PDUZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUZip4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip4, input_name='PDUZip4'), namespaceprefix_ , eol_)) + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.DeliveryConfirmation is not None: + namespaceprefix_ = self.DeliveryConfirmation_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryConfirmation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryConfirmation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryConfirmation), input_name='DeliveryConfirmation')), namespaceprefix_ , eol_)) + if self.InsuranceValue is not None: + namespaceprefix_ = self.InsuranceValue_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuranceValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuranceValue), input_name='InsuranceValue')), namespaceprefix_ , eol_)) + if self.MailingAckPackageID is not None: + namespaceprefix_ = self.MailingAckPackageID_nsprefix_ + ':' if (UseCapturedNS_ and self.MailingAckPackageID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailingAckPackageID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailingAckPackageID), input_name='MailingAckPackageID')), 
namespaceprefix_ , eol_)) + if self.WeightInPounds is not None: + namespaceprefix_ = self.WeightInPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInPounds, input_name='WeightInPounds'), namespaceprefix_ , eol_)) + if self.WeightInOunces is not None: + namespaceprefix_ = self.WeightInOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInOunces>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInOunces, input_name='WeightInOunces'), namespaceprefix_ , eol_)) + if self.RMA is not None: + namespaceprefix_ = self.RMA_nsprefix_ + ':' if (UseCapturedNS_ and self.RMA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMA>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMA), input_name='RMA')), namespaceprefix_ , eol_)) + if self.RMAPICFlag is not None: + namespaceprefix_ = self.RMAPICFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.RMAPICFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMAPICFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMAPICFlag), input_name='RMAPICFlag')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.RMABarcode is not None: + namespaceprefix_ = self.RMABarcode_nsprefix_ + ':' if (UseCapturedNS_ and self.RMABarcode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMABarcode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMABarcode), input_name='RMABarcode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelCount' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'LabelCount') + ival_ = self.gds_validate_integer(ival_, node, 'LabelCount') + self.LabelCount = ival_ + 
self.LabelCount_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'RetailerName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerName') + value_ = self.gds_validate_string(value_, node, 'RetailerName') + self.RetailerName = value_ + self.RetailerName_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailerAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerAddress') + value_ = self.gds_validate_string(value_, node, 'RetailerAddress') + self.RetailerAddress = value_ + self.RetailerAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitNumber') + ival_ = self.gds_validate_integer(ival_, node, 'PermitNumber') + self.PermitNumber = ival_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOCity') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOCity') + self.PermitIssuingPOCity = value_ + self.PermitIssuingPOCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOState') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOState') + self.PermitIssuingPOState = value_ + self.PermitIssuingPOState_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitIssuingPOZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PermitIssuingPOZip5') + self.PermitIssuingPOZip5 = ival_ + self.PermitIssuingPOZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUFirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUFirmName') + value_ = self.gds_validate_string(value_, node, 'PDUFirmName') + self.PDUFirmName = value_ + self.PDUFirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUPOBox': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUPOBox') + value_ = self.gds_validate_string(value_, node, 'PDUPOBox') + self.PDUPOBox = value_ + self.PDUPOBox_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUCity') + value_ = self.gds_validate_string(value_, node, 'PDUCity') + self.PDUCity = value_ + self.PDUCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUState') + value_ = self.gds_validate_string(value_, node, 'PDUState') + self.PDUState = value_ + self.PDUState_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip5') + self.PDUZip5 = ival_ + self.PDUZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip4') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip4') + self.PDUZip4 = ival_ + self.PDUZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceType': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryConfirmation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryConfirmation') + value_ = self.gds_validate_string(value_, node, 'DeliveryConfirmation') + self.DeliveryConfirmation = value_ + self.DeliveryConfirmation_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuranceValue') + value_ = self.gds_validate_string(value_, node, 'InsuranceValue') + self.InsuranceValue = value_ + self.InsuranceValue_nsprefix_ = child_.prefix + elif nodeName_ == 'MailingAckPackageID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailingAckPackageID') + value_ = self.gds_validate_string(value_, node, 'MailingAckPackageID') + self.MailingAckPackageID = value_ + self.MailingAckPackageID_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInPounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInPounds') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInPounds') + self.WeightInPounds = ival_ + self.WeightInPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInOunces' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInOunces') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInOunces') + self.WeightInOunces = ival_ + self.WeightInOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'RMA': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMA') + value_ = self.gds_validate_string(value_, node, 'RMA') + self.RMA = value_ + self.RMA_nsprefix_ = child_.prefix + elif nodeName_ == 'RMAPICFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMAPICFlag') + value_ = self.gds_validate_string(value_, node, 'RMAPICFlag') + self.RMAPICFlag = value_ + self.RMAPICFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'RMABarcode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMABarcode') + value_ = self.gds_validate_string(value_, node, 'RMABarcode') + self.RMABarcode = value_ + self.RMABarcode_nsprefix_ = child_.prefix +# end class EMRSV4_0BulkRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ImageParameter is None: + self.ImageParameter = [] + else: + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = 
staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def add_ImageParameter(self, value): + self.ImageParameter.append(value) + def insert_ImageParameter_at(self, index, value): + self.ImageParameter.insert(index, value) + def replace_ImageParameter_at(self, index, value): + self.ImageParameter[index] = value + def has__content(self): + if ( + self.ImageParameter + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ImageParameter_ in self.ImageParameter: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ImageParameter_), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter.append(value_) + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +# +# End data representation classes. 
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. 
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from emrsv4_0_bulk_request import *\n\n') + sys.stdout.write('import emrsv4_0_bulk_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "EMRSV4_0BulkRequest", + "ImageParametersType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/error.py b/modules/connectors/usps/karrio/schemas/usps/error.py new file mode 100644 index 0000000000..ad59a57ed2 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/error.py @@ -0,0 +1,1379 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:41 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/error.py') +# +# Command line arguments: +# ./schemas/Error.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/error.py" ./schemas/Error.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". 
See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values 
= input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell('<some label> -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write('    ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections."
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class Error(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Number=None, Source=None, Description=None, HelpFile=None, HelpContext=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Number = Number + self.Number_nsprefix_ = None + self.Source = Source + self.Source_nsprefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.HelpFile = HelpFile + self.HelpFile_nsprefix_ = None + self.HelpContext = HelpContext + self.HelpContext_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, Error) + if subclass is not None: + return subclass(*args_, **kwargs_) + if Error.subclass: + return Error.subclass(*args_, **kwargs_) + else: + return Error(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Number(self): + return self.Number + def set_Number(self, Number): + self.Number = Number + def get_Source(self): + return self.Source + def set_Source(self, Source): + self.Source = Source + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_HelpFile(self): + return self.HelpFile + def set_HelpFile(self, HelpFile): + self.HelpFile = HelpFile + def get_HelpContext(self): + return self.HelpContext + def set_HelpContext(self, HelpContext): + self.HelpContext = HelpContext + def has__content(self): + if ( + self.Number is not None or + self.Source is not None or + self.Description is not None or + self.HelpFile is not None or + self.HelpContext is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Error', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('Error') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'Error': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = 
set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Error') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Error', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Error'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Error', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Number is not None: + namespaceprefix_ = self.Number_nsprefix_ + ':' if (UseCapturedNS_ and self.Number_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumber>%s</%sNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Number), input_name='Number')), namespaceprefix_ , eol_)) + if self.Source is not None: + namespaceprefix_ = self.Source_nsprefix_ + ':' if (UseCapturedNS_ and self.Source_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSource>%s</%sSource>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Source), input_name='Source')), namespaceprefix_ , eol_)) + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s</%sDescription>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.HelpFile is not None: + namespaceprefix_ = self.HelpFile_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpFile_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpFile>%s</%sHelpFile>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HelpFile), input_name='HelpFile')), namespaceprefix_ , eol_)) + if self.HelpContext is not None: + namespaceprefix_ = self.HelpContext_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpContext_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpContext>%s</%sHelpContext>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HelpContext), input_name='HelpContext')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Number': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Number') + value_ = self.gds_validate_string(value_, node, 'Number') + self.Number = value_ + self.Number_nsprefix_ = child_.prefix + elif nodeName_ == 'Source': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Source') + value_ = self.gds_validate_string(value_, node, 'Source') + self.Source = value_ +
self.Source_nsprefix_ = child_.prefix + elif nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpFile': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HelpFile') + value_ = self.gds_validate_string(value_, node, 'HelpFile') + self.HelpFile = value_ + self.HelpFile_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpContext': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HelpContext') + value_ = self.gds_validate_string(value_, node, 'HelpContext') + self.HelpContext = value_ + self.HelpContext_nsprefix_ = child_.prefix +# end class Error + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from error import *\n\n') + sys.stdout.write('import error as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "Error" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_cancel_request.py b/modules/connectors/usps/karrio/schemas/usps/evs_cancel_request.py new file mode 100644 index 0000000000..3f89e3dd2f --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_cancel_request.py @@ -0,0 +1,1335 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:47 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_cancel_request.py') +# +# Command line arguments: +# ./schemas/eVSCancelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_cancel_request.py" ./schemas/eVSCancelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write('    ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSCancelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, BarcodeNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSCancelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSCancelRequest.subclass: + return eVSCancelRequest.subclass(*args_, **kwargs_) + else: + return eVSCancelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.BarcodeNumber is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSCancelRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSCancelRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSCancelRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSCancelRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSCancelRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s</%sBarcodeNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix +# end class eVSCancelRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python <Parser>.py [ -s ] <in_xml_file> +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_cancel_request import *\n\n') + sys.stdout.write('import evs_cancel_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSCancelRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_cancel_response.py b/modules/connectors/usps/karrio/schemas/usps/evs_cancel_response.py new file mode 100644 index 0000000000..3e6ebd3b50 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_cancel_response.py @@ -0,0 +1,1345 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:47 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_cancel_response.py') +# +# Command line arguments: +# ./schemas/eVSCancelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_cancel_response.py" ./schemas/eVSCancelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write('    ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSCancelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, BarcodeNumber=None, Status=None, Reason=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + self.Reason = Reason + self.Reason_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSCancelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSCancelResponse.subclass: + return eVSCancelResponse.subclass(*args_, **kwargs_) + else: + return eVSCancelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def get_Reason(self): + return self.Reason + def set_Reason(self, Reason): + self.Reason = Reason + def has__content(self): + if ( + self.BarcodeNumber is not None or + self.Status is not None or + self.Reason is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSCancelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSCancelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSCancelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSCancelResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSCancelResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s</%sBarcodeNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.Status is not None: + namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatus>%s</%sStatus>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_)) + if self.Reason is not None: + namespaceprefix_ = self.Reason_nsprefix_ + ':' if (UseCapturedNS_ and self.Reason_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReason>%s</%sReason>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Reason), input_name='Reason')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'Status': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Status') + value_ = self.gds_validate_string(value_, node, 'Status') + self.Status = value_ + self.Status_nsprefix_ = child_.prefix + elif nodeName_ == 'Reason': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Reason') + value_ = self.gds_validate_string(value_, node, 'Reason') + self.Reason = value_ + self.Reason_nsprefix_ = child_.prefix +# end class eVSCancelResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python <Parser>.py [ -s ] <in_xml_file> +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_cancel_response import *\n\n') + sys.stdout.write('import evs_cancel_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSCancelResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_request.py b/modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_request.py new file mode 100644 index 0000000000..68536bb421 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_request.py @@ -0,0 +1,3343 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:48 2024 by generateDS.py version 2.43.3. 
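The generated module above can be exercised directly through its parse helpers. A minimal sketch, assuming it is importable as karrio.schemas.usps.evs_cancel_response and using made-up field values:

    import karrio.schemas.usps.evs_cancel_response as evs

    SAMPLE = (
        '<eVSCancelResponse>'
        '<BarcodeNumber>9275090000000000000000</BarcodeNumber>'
        '<Status>Cancelled</Status>'
        '<Reason>Cancelled by shipper</Reason>'
        '</eVSCancelResponse>'
    )

    # parseString() builds the object tree; silence=True skips re-exporting it to stdout.
    cancel_reply = evs.parseString(SAMPLE, silence=True)
    print(cancel_reply.get_BarcodeNumber(), cancel_reply.get_Status(), cancel_reply.get_Reason())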
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_express_mail_intl_request.py') +# +# Command line arguments: +# ./schemas/eVSExpressMailIntlRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_express_mail_intl_request.py" ./schemas/eVSExpressMailIntlRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
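The gds_format_datetime / gds_parse_datetime pair defined on GeneratedsSuper above round-trips dateTime values, emitting a trailing 'Z' when the timestamp's UTC offset is zero. A small sketch of the expected behaviour, using an arbitrary timestamp (the _Demo subclass exists only for illustration):

    import datetime as dt

    class _Demo(GeneratedsSuper):  # illustration only; relies on the gds_* helpers defined above
        pass

    stamp = dt.datetime(2024, 4, 3, 21, 8, 48, tzinfo=dt.timezone.utc)
    assert _Demo().gds_format_datetime(stamp) == '2024-04-03T21:08:48Z'
    assert GeneratedsSuper.gds_parse_datetime('2024-04-03T21:08:48Z') == stamp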
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSExpressMailIntlRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, FromCustomsReference=None, ToName=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToCity=None, ToProvince=None, ToCountry=None, ToPostalCode=None, ToPOBoxFlag=None, ToPhone=None, ToFax=None, ToEmail=None, ImportersReferenceNumber=None, NonDeliveryOption=None, RedirectName=None, RedirectEmail=None, RedirectSMS=None, RedirectAddress=None, RedirectCity=None, RedirectState=None, RedirectZipCode=None, RedirectZip4=None, Container=None, ShippingContents=None, InsuredNumber=None, InsuredAmount=None, Postage=None, GrossPounds=None, GrossOunces=None, ContentType=None, ContentTypeOther=None, Agreement=None, Comments=None, LicenseNumber=None, CertificateNumber=None, InvoiceNumber=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, POZipCode=None, LabelDate=None, EMCAAccount=None, HoldForManifest=None, EELPFC=None, PriceOptions=None, Length=None, Width=None, Height=None, Girth=None, LabelTime=None, MeterPaymentFlag=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, ImportersReferenceType=None, ImportersTelephoneNumber=None, ImportersFaxNumber=None, ImportersEmail=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, SenderEmail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + self.FromMiddleInitial 
= FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.FromCustomsReference = FromCustomsReference + self.FromCustomsReference_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToCountry = ToCountry + self.ToCountry_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPOBoxFlag = ToPOBoxFlag + self.ToPOBoxFlag_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.ToFax = ToFax + self.ToFax_nsprefix_ = None + self.ToEmail = ToEmail + self.ToEmail_nsprefix_ = None + self.ImportersReferenceNumber = ImportersReferenceNumber + self.ImportersReferenceNumber_nsprefix_ = None + self.NonDeliveryOption = NonDeliveryOption + self.NonDeliveryOption_nsprefix_ = None + self.RedirectName = RedirectName + self.RedirectName_nsprefix_ = None + self.RedirectEmail = RedirectEmail + self.RedirectEmail_nsprefix_ = None + self.RedirectSMS = RedirectSMS + self.RedirectSMS_nsprefix_ = None + self.RedirectAddress = RedirectAddress + self.RedirectAddress_nsprefix_ = None + self.RedirectCity = RedirectCity + self.RedirectCity_nsprefix_ = None + self.RedirectState = RedirectState + self.RedirectState_nsprefix_ = None + self.RedirectZipCode = RedirectZipCode + self.RedirectZipCode_nsprefix_ = None + self.RedirectZip4 = RedirectZip4 + self.RedirectZip4_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.InsuredNumber = InsuredNumber + self.InsuredNumber_nsprefix_ = None + self.InsuredAmount = InsuredAmount + self.InsuredAmount_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ContentTypeOther = ContentTypeOther + self.ContentTypeOther_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Comments = Comments + self.Comments_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + 
self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.LabelDate = LabelDate + self.LabelDate_nsprefix_ = None + self.EMCAAccount = EMCAAccount + self.EMCAAccount_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.EELPFC = EELPFC + self.EELPFC_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.LabelTime = LabelTime + self.LabelTime_nsprefix_ = None + self.MeterPaymentFlag = MeterPaymentFlag + self.MeterPaymentFlag_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.ImportersReferenceType = ImportersReferenceType + self.ImportersReferenceType_nsprefix_ = None + self.ImportersTelephoneNumber = ImportersTelephoneNumber + self.ImportersTelephoneNumber_nsprefix_ = None + self.ImportersFaxNumber = ImportersFaxNumber + self.ImportersFaxNumber_nsprefix_ = None + self.ImportersEmail = ImportersEmail + self.ImportersEmail_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.SenderEmail = SenderEmail + self.SenderEmail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSExpressMailIntlRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSExpressMailIntlRequest.subclass: + return eVSExpressMailIntlRequest.subclass(*args_, **kwargs_) + else: + return eVSExpressMailIntlRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ 
= ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_FromCustomsReference(self): + return self.FromCustomsReference + def set_FromCustomsReference(self, FromCustomsReference): + self.FromCustomsReference = FromCustomsReference + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToCountry(self): + return self.ToCountry + def set_ToCountry(self, ToCountry): + self.ToCountry = ToCountry + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPOBoxFlag(self): + return self.ToPOBoxFlag + def set_ToPOBoxFlag(self, ToPOBoxFlag): + self.ToPOBoxFlag = 
ToPOBoxFlag + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_ToFax(self): + return self.ToFax + def set_ToFax(self, ToFax): + self.ToFax = ToFax + def get_ToEmail(self): + return self.ToEmail + def set_ToEmail(self, ToEmail): + self.ToEmail = ToEmail + def get_ImportersReferenceNumber(self): + return self.ImportersReferenceNumber + def set_ImportersReferenceNumber(self, ImportersReferenceNumber): + self.ImportersReferenceNumber = ImportersReferenceNumber + def get_NonDeliveryOption(self): + return self.NonDeliveryOption + def set_NonDeliveryOption(self, NonDeliveryOption): + self.NonDeliveryOption = NonDeliveryOption + def get_RedirectName(self): + return self.RedirectName + def set_RedirectName(self, RedirectName): + self.RedirectName = RedirectName + def get_RedirectEmail(self): + return self.RedirectEmail + def set_RedirectEmail(self, RedirectEmail): + self.RedirectEmail = RedirectEmail + def get_RedirectSMS(self): + return self.RedirectSMS + def set_RedirectSMS(self, RedirectSMS): + self.RedirectSMS = RedirectSMS + def get_RedirectAddress(self): + return self.RedirectAddress + def set_RedirectAddress(self, RedirectAddress): + self.RedirectAddress = RedirectAddress + def get_RedirectCity(self): + return self.RedirectCity + def set_RedirectCity(self, RedirectCity): + self.RedirectCity = RedirectCity + def get_RedirectState(self): + return self.RedirectState + def set_RedirectState(self, RedirectState): + self.RedirectState = RedirectState + def get_RedirectZipCode(self): + return self.RedirectZipCode + def set_RedirectZipCode(self, RedirectZipCode): + self.RedirectZipCode = RedirectZipCode + def get_RedirectZip4(self): + return self.RedirectZip4 + def set_RedirectZip4(self, RedirectZip4): + self.RedirectZip4 = RedirectZip4 + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_InsuredNumber(self): + return self.InsuredNumber + def set_InsuredNumber(self, InsuredNumber): + self.InsuredNumber = InsuredNumber + def get_InsuredAmount(self): + return self.InsuredAmount + def set_InsuredAmount(self, InsuredAmount): + self.InsuredAmount = InsuredAmount + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentTypeOther(self): + return self.ContentTypeOther + def set_ContentTypeOther(self, ContentTypeOther): + self.ContentTypeOther = ContentTypeOther + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + self.Agreement = Agreement + def get_Comments(self): + return self.Comments + def set_Comments(self, Comments): + self.Comments = Comments + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def set_CertificateNumber(self, CertificateNumber): + 
self.CertificateNumber = CertificateNumber + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_LabelDate(self): + return self.LabelDate + def set_LabelDate(self, LabelDate): + self.LabelDate = LabelDate + def get_EMCAAccount(self): + return self.EMCAAccount + def set_EMCAAccount(self, EMCAAccount): + self.EMCAAccount = EMCAAccount + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_EELPFC(self): + return self.EELPFC + def set_EELPFC(self, EELPFC): + self.EELPFC = EELPFC + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_LabelTime(self): + return self.LabelTime + def set_LabelTime(self, LabelTime): + self.LabelTime = LabelTime + def get_MeterPaymentFlag(self): + return self.MeterPaymentFlag + def set_MeterPaymentFlag(self, MeterPaymentFlag): + self.MeterPaymentFlag = MeterPaymentFlag + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_ImportersReferenceType(self): + return self.ImportersReferenceType + def set_ImportersReferenceType(self, ImportersReferenceType): + self.ImportersReferenceType = ImportersReferenceType + def get_ImportersTelephoneNumber(self): + return self.ImportersTelephoneNumber + def set_ImportersTelephoneNumber(self, ImportersTelephoneNumber): + self.ImportersTelephoneNumber = ImportersTelephoneNumber + def get_ImportersFaxNumber(self): + return self.ImportersFaxNumber + def set_ImportersFaxNumber(self, ImportersFaxNumber): + self.ImportersFaxNumber = ImportersFaxNumber + def get_ImportersEmail(self): + return self.ImportersEmail + def set_ImportersEmail(self, ImportersEmail): + self.ImportersEmail = ImportersEmail + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def 
get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_SenderEmail(self): + return self.SenderEmail + def set_SenderEmail(self, SenderEmail): + self.SenderEmail = SenderEmail + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.FromCustomsReference is not None or + self.ToName is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToCity is not None or + self.ToProvince is not None or + self.ToCountry is not None or + self.ToPostalCode is not None or + self.ToPOBoxFlag is not None or + self.ToPhone is 
not None or + self.ToFax is not None or + self.ToEmail is not None or + self.ImportersReferenceNumber is not None or + self.NonDeliveryOption is not None or + self.RedirectName is not None or + self.RedirectEmail is not None or + self.RedirectSMS is not None or + self.RedirectAddress is not None or + self.RedirectCity is not None or + self.RedirectState is not None or + self.RedirectZipCode is not None or + self.RedirectZip4 is not None or + self.Container is not None or + self.ShippingContents is not None or + self.InsuredNumber is not None or + self.InsuredAmount is not None or + self.Postage is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.ContentType is not None or + self.ContentTypeOther is not None or + self.Agreement is not None or + self.Comments is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.InvoiceNumber is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.POZipCode is not None or + self.LabelDate is not None or + self.EMCAAccount is not None or + self.HoldForManifest is not None or + self.EELPFC is not None or + self.PriceOptions is not None or + self.Length is not None or + self.Width is not None or + self.Height is not None or + self.Girth is not None or + self.LabelTime is not None or + self.MeterPaymentFlag is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.ImportersReferenceType is not None or + self.ImportersTelephoneNumber is not None or + self.ImportersFaxNumber is not None or + self.ImportersEmail is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.SenderEmail is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSExpressMailIntlRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSExpressMailIntlRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSExpressMailIntlRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSExpressMailIntlRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSExpressMailIntlRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromFirstName is not None: + namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_)) + if self.FromMiddleInitial is not None: + namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromMiddleInitial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_)) + if self.FromLastName is not None: + namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if self.FromCustomsReference is not None: + namespaceprefix_ = self.FromCustomsReference_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCustomsReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCustomsReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCustomsReference), input_name='FromCustomsReference')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirstName is not 
None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToCountry is not None: + namespaceprefix_ = self.ToCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCountry), input_name='ToCountry')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), 
input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPOBoxFlag is not None: + namespaceprefix_ = self.ToPOBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPOBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPOBoxFlag), input_name='ToPOBoxFlag')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.ToFax is not None: + namespaceprefix_ = self.ToFax_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFax_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFax>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFax), input_name='ToFax')), namespaceprefix_ , eol_)) + if self.ToEmail is not None: + namespaceprefix_ = self.ToEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToEmail), input_name='ToEmail')), namespaceprefix_ , eol_)) + if self.ImportersReferenceNumber is not None: + namespaceprefix_ = self.ImportersReferenceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceNumber), input_name='ImportersReferenceNumber')), namespaceprefix_ , eol_)) + if self.NonDeliveryOption is not None: + namespaceprefix_ = self.NonDeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonDeliveryOption), input_name='NonDeliveryOption')), namespaceprefix_ , eol_)) + if self.RedirectName is not None: + namespaceprefix_ = self.RedirectName_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectName), input_name='RedirectName')), namespaceprefix_ , eol_)) + if self.RedirectEmail is not None: + namespaceprefix_ = self.RedirectEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectEmail), input_name='RedirectEmail')), namespaceprefix_ , eol_)) + if self.RedirectSMS is not None: + namespaceprefix_ = self.RedirectSMS_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectSMS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectSMS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectSMS), input_name='RedirectSMS')), namespaceprefix_ , eol_)) + if self.RedirectAddress is not None: + namespaceprefix_ = self.RedirectAddress_nsprefix_ + 
':' if (UseCapturedNS_ and self.RedirectAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectAddress), input_name='RedirectAddress')), namespaceprefix_ , eol_)) + if self.RedirectCity is not None: + namespaceprefix_ = self.RedirectCity_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectCity), input_name='RedirectCity')), namespaceprefix_ , eol_)) + if self.RedirectState is not None: + namespaceprefix_ = self.RedirectState_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectState), input_name='RedirectState')), namespaceprefix_ , eol_)) + if self.RedirectZipCode is not None: + namespaceprefix_ = self.RedirectZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZipCode), input_name='RedirectZipCode')), namespaceprefix_ , eol_)) + if self.RedirectZip4 is not None: + namespaceprefix_ = self.RedirectZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZip4), input_name='RedirectZip4')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.InsuredNumber is not None: + namespaceprefix_ = self.InsuredNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredNumber), input_name='InsuredNumber')), namespaceprefix_ , eol_)) + if self.InsuredAmount is not None: + namespaceprefix_ = self.InsuredAmount_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredAmount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredAmount>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.InsuredAmount, input_name='InsuredAmount'), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Postage), 
input_name='Postage')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentTypeOther is not None: + namespaceprefix_ = self.ContentTypeOther_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentTypeOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentTypeOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentTypeOther), input_name='ContentTypeOther')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Comments is not None: + namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.LabelDate is not None: + namespaceprefix_ = self.LabelDate_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelDate), input_name='LabelDate')), namespaceprefix_ , eol_)) + if self.EMCAAccount is not None: + namespaceprefix_ = self.EMCAAccount_nsprefix_ + ':' if (UseCapturedNS_ and self.EMCAAccount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEMCAAccount>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EMCAAccount), input_name='EMCAAccount')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.EELPFC is not None: + namespaceprefix_ = self.EELPFC_nsprefix_ + ':' if (UseCapturedNS_ and self.EELPFC_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEELPFC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EELPFC), input_name='EELPFC')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), 
namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.LabelTime is not None: + namespaceprefix_ = self.LabelTime_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelTime), input_name='LabelTime')), namespaceprefix_ , eol_)) + if self.MeterPaymentFlag is not None: + namespaceprefix_ = self.MeterPaymentFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.MeterPaymentFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMeterPaymentFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MeterPaymentFlag), input_name='MeterPaymentFlag')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.ImportersReferenceType is 
not None: + namespaceprefix_ = self.ImportersReferenceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceType), input_name='ImportersReferenceType')), namespaceprefix_ , eol_)) + if self.ImportersTelephoneNumber is not None: + namespaceprefix_ = self.ImportersTelephoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersTelephoneNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersTelephoneNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersTelephoneNumber), input_name='ImportersTelephoneNumber')), namespaceprefix_ , eol_)) + if self.ImportersFaxNumber is not None: + namespaceprefix_ = self.ImportersFaxNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersFaxNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersFaxNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersFaxNumber), input_name='ImportersFaxNumber')), namespaceprefix_ , eol_)) + if self.ImportersEmail is not None: + namespaceprefix_ = self.ImportersEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersEmail), input_name='ImportersEmail')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is 
not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if self.SenderFirstName is not None: + namespaceprefix_ = self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), 
input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.SenderEmail is not None: + namespaceprefix_ = self.SenderEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEmail), input_name='SenderEmail')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName 
= value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCustomsReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCustomsReference') + value_ = self.gds_validate_string(value_, node, 'FromCustomsReference') + self.FromCustomsReference = value_ + self.FromCustomsReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + 
self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCountry') + value_ = self.gds_validate_string(value_, node, 'ToCountry') + self.ToCountry = value_ + self.ToCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPOBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPOBoxFlag') + value_ = self.gds_validate_string(value_, node, 'ToPOBoxFlag') + self.ToPOBoxFlag = value_ + self.ToPOBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFax': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFax') + value_ = self.gds_validate_string(value_, node, 'ToFax') + self.ToFax = value_ + self.ToFax_nsprefix_ = child_.prefix + elif nodeName_ == 'ToEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToEmail') + value_ = self.gds_validate_string(value_, node, 'ToEmail') + self.ToEmail = value_ + self.ToEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReferenceNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceNumber') + self.ImportersReferenceNumber = value_ + self.ImportersReferenceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonDeliveryOption') + value_ = self.gds_validate_string(value_, node, 'NonDeliveryOption') + self.NonDeliveryOption = value_ + self.NonDeliveryOption_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectName') + value_ = 
self.gds_validate_string(value_, node, 'RedirectName') + self.RedirectName = value_ + self.RedirectName_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectEmail') + value_ = self.gds_validate_string(value_, node, 'RedirectEmail') + self.RedirectEmail = value_ + self.RedirectEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectSMS': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectSMS') + value_ = self.gds_validate_string(value_, node, 'RedirectSMS') + self.RedirectSMS = value_ + self.RedirectSMS_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectAddress') + value_ = self.gds_validate_string(value_, node, 'RedirectAddress') + self.RedirectAddress = value_ + self.RedirectAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectCity') + value_ = self.gds_validate_string(value_, node, 'RedirectCity') + self.RedirectCity = value_ + self.RedirectCity_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectState') + value_ = self.gds_validate_string(value_, node, 'RedirectState') + self.RedirectState = value_ + self.RedirectState_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZipCode') + value_ = self.gds_validate_string(value_, node, 'RedirectZipCode') + self.RedirectZipCode = value_ + self.RedirectZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZip4') + value_ = self.gds_validate_string(value_, node, 'RedirectZip4') + self.RedirectZip4 = value_ + self.RedirectZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'InsuredNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredNumber') + value_ = self.gds_validate_string(value_, node, 'InsuredNumber') + self.InsuredNumber = value_ + self.InsuredNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredAmount' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'InsuredAmount') + fval_ = self.gds_validate_decimal(fval_, node, 'InsuredAmount') + self.InsuredAmount = fval_ + self.InsuredAmount_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Postage') + value_ = self.gds_validate_string(value_, node, 'Postage') + self.Postage = value_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'GrossPounds') + fval_ = self.gds_validate_float(fval_, node, 'GrossPounds') + self.GrossPounds = fval_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 
'GrossOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'GrossOunces') + fval_ = self.gds_validate_float(fval_, node, 'GrossOunces') + self.GrossOunces = fval_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentTypeOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentTypeOther') + value_ = self.gds_validate_string(value_, node, 'ContentTypeOther') + self.ContentTypeOther = value_ + self.ContentTypeOther_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Comments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Comments') + value_ = self.gds_validate_string(value_, node, 'Comments') + self.Comments = value_ + self.Comments_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelDate') + value_ = 
self.gds_validate_string(value_, node, 'LabelDate') + self.LabelDate = value_ + self.LabelDate_nsprefix_ = child_.prefix + elif nodeName_ == 'EMCAAccount': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EMCAAccount') + value_ = self.gds_validate_string(value_, node, 'EMCAAccount') + self.EMCAAccount = value_ + self.EMCAAccount_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'EELPFC': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EELPFC') + value_ = self.gds_validate_string(value_, node, 'EELPFC') + self.EELPFC = value_ + self.EELPFC_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Length') + fval_ = self.gds_validate_float(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Width') + fval_ = self.gds_validate_float(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Height') + fval_ = self.gds_validate_float(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Girth') + fval_ = self.gds_validate_float(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelTime') + value_ = self.gds_validate_string(value_, node, 'LabelTime') + self.LabelTime = value_ + self.LabelTime_nsprefix_ = child_.prefix + elif nodeName_ == 'MeterPaymentFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MeterPaymentFlag') + value_ = self.gds_validate_string(value_, node, 'MeterPaymentFlag') + self.MeterPaymentFlag = value_ + self.MeterPaymentFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = 
self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReferenceType') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceType') + self.ImportersReferenceType = value_ + self.ImportersReferenceType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersTelephoneNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersTelephoneNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersTelephoneNumber') + self.ImportersTelephoneNumber = value_ + self.ImportersTelephoneNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersFaxNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersFaxNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersFaxNumber') + self.ImportersFaxNumber = value_ + self.ImportersFaxNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersEmail') + value_ = self.gds_validate_string(value_, node, 'ImportersEmail') + self.ImportersEmail = value_ + self.ImportersEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + 
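+        # The Sender* branches below collect the sender contact details (name, business, address, phone, email) parsed from the eVS Express Mail International label request.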
elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEmail') + value_ = self.gds_validate_string(value_, node, 'SenderEmail') + self.SenderEmail = value_ + self.SenderEmail_nsprefix_ = child_.prefix +# end class eVSExpressMailIntlRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + 
self.ImageParameter = ImageParameter + def has__content(self): + if ( + self.ImageParameter is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, 
ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def 
__init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, 
eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + 
value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Value') + fval_ = self.gds_validate_float(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetPounds') + fval_ = self.gds_validate_float(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetOunces') + fval_ = self.gds_validate_float(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_express_mail_intl_request import *\n\n') + sys.stdout.write('import evs_express_mail_intl_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ImageParametersType", + "ItemDetailType", + "ShippingContentsType", + "eVSExpressMailIntlRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_response.py b/modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_response.py new file mode 100644 index 0000000000..470a320c0e --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_express_mail_intl_response.py @@ -0,0 +1,1600 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:48 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_express_mail_intl_response.py') +# +# Command line arguments: +# ./schemas/eVSExpressMailIntlResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_express_mail_intl_response.py" ./schemas/eVSExpressMailIntlResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
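+            # Recurse toward the document root so get_path_ can reverse the collected tag names and join them into a '/'-separated element path.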
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
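+# (showIndent, quote_xml and quote_attrib below provide the pretty-print indentation and XML escaping used by the generated element classes' export methods.)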
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSExpressMailIntlResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, TotalValue=None, SDRValue=None, BarcodeNumber=None, LabelImage=None, Page2Image=None, Page3Image=None, Page4Image=None, Page5Image=None, Page6Image=None, Prohibitions=None, Restrictions=None, Observations=None, Regulations=None, AdditionalRestrictions=None, InsuranceFee=None, GuaranteeAvailability=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.TotalValue = TotalValue + self.TotalValue_nsprefix_ = None + self.SDRValue = SDRValue + self.SDRValue_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.Page2Image = Page2Image + self.Page2Image_nsprefix_ = None + self.Page3Image = Page3Image + self.Page3Image_nsprefix_ = None + self.Page4Image = Page4Image + self.Page4Image_nsprefix_ = None + self.Page5Image = Page5Image + self.Page5Image_nsprefix_ = None + self.Page6Image = Page6Image + self.Page6Image_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.Regulations = Regulations + self.Regulations_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.InsuranceFee = InsuranceFee + self.InsuranceFee_nsprefix_ = None + self.GuaranteeAvailability = GuaranteeAvailability + self.GuaranteeAvailability_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSExpressMailIntlResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSExpressMailIntlResponse.subclass: + return eVSExpressMailIntlResponse.subclass(*args_, **kwargs_) + else: + return eVSExpressMailIntlResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def set_Postage(self, 
Postage): + self.Postage = Postage + def get_TotalValue(self): + return self.TotalValue + def set_TotalValue(self, TotalValue): + self.TotalValue = TotalValue + def get_SDRValue(self): + return self.SDRValue + def set_SDRValue(self, SDRValue): + self.SDRValue = SDRValue + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_Page2Image(self): + return self.Page2Image + def set_Page2Image(self, Page2Image): + self.Page2Image = Page2Image + def get_Page3Image(self): + return self.Page3Image + def set_Page3Image(self, Page3Image): + self.Page3Image = Page3Image + def get_Page4Image(self): + return self.Page4Image + def set_Page4Image(self, Page4Image): + self.Page4Image = Page4Image + def get_Page5Image(self): + return self.Page5Image + def set_Page5Image(self, Page5Image): + self.Page5Image = Page5Image + def get_Page6Image(self): + return self.Page6Image + def set_Page6Image(self, Page6Image): + self.Page6Image = Page6Image + def get_Prohibitions(self): + return self.Prohibitions + def set_Prohibitions(self, Prohibitions): + self.Prohibitions = Prohibitions + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def get_Observations(self): + return self.Observations + def set_Observations(self, Observations): + self.Observations = Observations + def get_Regulations(self): + return self.Regulations + def set_Regulations(self, Regulations): + self.Regulations = Regulations + def get_AdditionalRestrictions(self): + return self.AdditionalRestrictions + def set_AdditionalRestrictions(self, AdditionalRestrictions): + self.AdditionalRestrictions = AdditionalRestrictions + def get_InsuranceFee(self): + return self.InsuranceFee + def set_InsuranceFee(self, InsuranceFee): + self.InsuranceFee = InsuranceFee + def get_GuaranteeAvailability(self): + return self.GuaranteeAvailability + def set_GuaranteeAvailability(self, GuaranteeAvailability): + self.GuaranteeAvailability = GuaranteeAvailability + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def has__content(self): + if ( + self.Postage is not None or + self.TotalValue is not None or + self.SDRValue is not None or + self.BarcodeNumber is not None or + self.LabelImage is not None or + self.Page2Image is not None or + self.Page3Image is not None or + self.Page4Image is not None or + self.Page5Image is not None or + self.Page6Image is not None or + self.Prohibitions is not None or + self.Restrictions is not None or + self.Observations is not None or + self.Regulations is not None or + self.AdditionalRestrictions is not None or + self.InsuranceFee is not None or + self.GuaranteeAvailability is not None or + self.RemainingBarcodes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSExpressMailIntlResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSExpressMailIntlResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ 
and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSExpressMailIntlResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSExpressMailIntlResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSExpressMailIntlResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.TotalValue is not None: + namespaceprefix_ = self.TotalValue_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotalValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.TotalValue, input_name='TotalValue'), namespaceprefix_ , eol_)) + if self.SDRValue is not None: + namespaceprefix_ = self.SDRValue_nsprefix_ + ':' if (UseCapturedNS_ and self.SDRValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSDRValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.SDRValue, input_name='SDRValue'), namespaceprefix_ , eol_)) + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.Page2Image is not None: + namespaceprefix_ = self.Page2Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page2Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage2Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page2Image), input_name='Page2Image')), namespaceprefix_ , eol_)) + if self.Page3Image is not None: + namespaceprefix_ = self.Page3Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page3Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage3Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page3Image), input_name='Page3Image')), namespaceprefix_ , eol_)) + if self.Page4Image is not None: + 
namespaceprefix_ = self.Page4Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page4Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage4Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page4Image), input_name='Page4Image')), namespaceprefix_ , eol_)) + if self.Page5Image is not None: + namespaceprefix_ = self.Page5Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page5Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage5Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page5Image), input_name='Page5Image')), namespaceprefix_ , eol_)) + if self.Page6Image is not None: + namespaceprefix_ = self.Page6Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page6Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage6Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page6Image), input_name='Page6Image')), namespaceprefix_ , eol_)) + if self.Prohibitions is not None: + namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProhibitions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_)) + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + if self.Observations is not None: + namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sObservations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_)) + if self.Regulations is not None: + namespaceprefix_ = self.Regulations_nsprefix_ + ':' if (UseCapturedNS_ and self.Regulations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRegulations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Regulations), input_name='Regulations')), namespaceprefix_ , eol_)) + if self.AdditionalRestrictions is not None: + namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_)) + if self.InsuranceFee is not None: + namespaceprefix_ = self.InsuranceFee_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceFee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuranceFee>%s%s' % (namespaceprefix_ , self.gds_format_float(self.InsuranceFee, input_name='InsuranceFee'), namespaceprefix_ , eol_)) + if self.GuaranteeAvailability is not None: + namespaceprefix_ = self.GuaranteeAvailability_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteeAvailability_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteeAvailability>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteeAvailability), input_name='GuaranteeAvailability')), namespaceprefix_ , eol_)) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RemainingBarcodes), input_name='RemainingBarcodes')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'TotalValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'TotalValue') + fval_ = self.gds_validate_float(fval_, node, 'TotalValue') + self.TotalValue = fval_ + self.TotalValue_nsprefix_ = child_.prefix + elif nodeName_ == 'SDRValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'SDRValue') + fval_ = self.gds_validate_float(fval_, node, 'SDRValue') + self.SDRValue = fval_ + self.SDRValue_nsprefix_ = child_.prefix + elif nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'Page2Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page2Image') + value_ = self.gds_validate_string(value_, node, 'Page2Image') + self.Page2Image = value_ + self.Page2Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page3Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page3Image') + value_ = self.gds_validate_string(value_, node, 'Page3Image') + self.Page3Image = value_ + self.Page3Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page4Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page4Image') + value_ = self.gds_validate_string(value_, node, 'Page4Image') + self.Page4Image = value_ + self.Page4Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page5Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page5Image') + value_ = self.gds_validate_string(value_, node, 'Page5Image') + self.Page5Image = value_ + self.Page5Image_nsprefix_ = child_.prefix + 
elif nodeName_ == 'Page6Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page6Image') + value_ = self.gds_validate_string(value_, node, 'Page6Image') + self.Page6Image = value_ + self.Page6Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Prohibitions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Prohibitions') + value_ = self.gds_validate_string(value_, node, 'Prohibitions') + self.Prohibitions = value_ + self.Prohibitions_nsprefix_ = child_.prefix + elif nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'Regulations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Regulations') + value_ = self.gds_validate_string(value_, node, 'Regulations') + self.Regulations = value_ + self.Regulations_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceFee' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'InsuranceFee') + fval_ = self.gds_validate_float(fval_, node, 'InsuranceFee') + self.InsuranceFee = fval_ + self.InsuranceFee_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteeAvailability': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteeAvailability') + value_ = self.gds_validate_string(value_, node, 'GuaranteeAvailability') + self.GuaranteeAvailability = value_ + self.GuaranteeAvailability_nsprefix_ = child_.prefix + elif nodeName_ == 'RemainingBarcodes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RemainingBarcodes') + value_ = self.gds_validate_string(value_, node, 'RemainingBarcodes') + self.RemainingBarcodes = value_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSExpressMailIntlResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_express_mail_intl_response import *\n\n') + sys.stdout.write('import evs_express_mail_intl_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSExpressMailIntlResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_request.py b/modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_request.py new file mode 100644 index 0000000000..ce60c8ec96 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_request.py @@ -0,0 +1,3174 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:48 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_first_class_mail_intl_request.py') +# +# Command line arguments: +# ./schemas/eVSFirstClassMailIntlRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_first_class_mail_intl_request.py" ./schemas/eVSFirstClassMailIntlRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSFirstClassMailIntlRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, ToName=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToCity=None, ToProvince=None, ToCountry=None, ToPostalCode=None, ToPOBoxFlag=None, ToPhone=None, ToFax=None, ToEmail=None, FirstClassMailType=None, ShippingContents=None, Postage=None, GrossPounds=None, GrossOunces=None, ContentType=None, ContentTypeOther=None, Agreement=None, Comments=None, LicenseNumber=None, CertificateNumber=None, InvoiceNumber=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, POZipCode=None, LabelDate=None, HoldForManifest=None, EELPFC=None, Container=None, Length=None, Width=None, Height=None, Girth=None, ExtraServices=None, PriceOptions=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, SenderEmail=None, RemainingBarcodes=None, ChargebackCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + self.FromMiddleInitial = FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + 
self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToCountry = ToCountry + self.ToCountry_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPOBoxFlag = ToPOBoxFlag + self.ToPOBoxFlag_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.ToFax = ToFax + self.ToFax_nsprefix_ = None + self.ToEmail = ToEmail + self.ToEmail_nsprefix_ = None + self.FirstClassMailType = FirstClassMailType + self.FirstClassMailType_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ContentTypeOther = ContentTypeOther + self.ContentTypeOther_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Comments = Comments + self.Comments_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.LabelDate = LabelDate + self.LabelDate_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.EELPFC = EELPFC + self.EELPFC_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = 
DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.SenderEmail = SenderEmail + self.SenderEmail_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSFirstClassMailIntlRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSFirstClassMailIntlRequest.subclass: + return eVSFirstClassMailIntlRequest.subclass(*args_, **kwargs_) + else: + return eVSFirstClassMailIntlRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + 
return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToCountry(self): + return self.ToCountry + def set_ToCountry(self, ToCountry): + self.ToCountry = ToCountry + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPOBoxFlag(self): + return self.ToPOBoxFlag + def set_ToPOBoxFlag(self, ToPOBoxFlag): + self.ToPOBoxFlag = ToPOBoxFlag + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_ToFax(self): + return self.ToFax + def set_ToFax(self, ToFax): + self.ToFax = ToFax + def get_ToEmail(self): + return self.ToEmail + def set_ToEmail(self, ToEmail): + self.ToEmail = ToEmail + def get_FirstClassMailType(self): + return self.FirstClassMailType + def set_FirstClassMailType(self, FirstClassMailType): + self.FirstClassMailType = FirstClassMailType + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentTypeOther(self): + return self.ContentTypeOther + def set_ContentTypeOther(self, ContentTypeOther): + self.ContentTypeOther = ContentTypeOther + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + self.Agreement = Agreement + def get_Comments(self): + return self.Comments + def set_Comments(self, Comments): + self.Comments = Comments + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def set_CertificateNumber(self, CertificateNumber): + 
self.CertificateNumber = CertificateNumber + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_LabelDate(self): + return self.LabelDate + def set_LabelDate(self, LabelDate): + self.LabelDate = LabelDate + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_EELPFC(self): + return self.EELPFC + def set_EELPFC(self, EELPFC): + self.EELPFC = EELPFC + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = 
VendorProductVersionNumber + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_SenderEmail(self): + return self.SenderEmail + def set_SenderEmail(self, SenderEmail): + self.SenderEmail = SenderEmail + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.ToName is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToCity is not None or + self.ToProvince is not None or + self.ToCountry is not None or + self.ToPostalCode is not None or + self.ToPOBoxFlag is not None or + self.ToPhone is not None or + self.ToFax is not None or + self.ToEmail is not None or + self.FirstClassMailType is not None or + self.ShippingContents is not None or + self.Postage is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.ContentType is not None or + self.ContentTypeOther is not None or + self.Agreement is not None or + self.Comments is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.InvoiceNumber is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None 
or + self.CustomerRefNo2 is not None or + self.POZipCode is not None or + self.LabelDate is not None or + self.HoldForManifest is not None or + self.EELPFC is not None or + self.Container is not None or + self.Length is not None or + self.Width is not None or + self.Height is not None or + self.Girth is not None or + self.ExtraServices is not None or + self.PriceOptions is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.SenderEmail is not None or + self.RemainingBarcodes is not None or + self.ChargebackCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSFirstClassMailIntlRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSFirstClassMailIntlRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSFirstClassMailIntlRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSFirstClassMailIntlRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSFirstClassMailIntlRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), 
input_name='Option')), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Revision, input_name='Revision'), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromFirstName is not None: + namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_)) + if self.FromMiddleInitial is not None: + namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromMiddleInitial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_)) + if self.FromLastName is not None: + namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirstName is not None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToCountry is not None: + namespaceprefix_ = self.ToCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCountry), input_name='ToCountry')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPOBoxFlag is not None: + namespaceprefix_ = self.ToPOBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPOBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPOBoxFlag), input_name='ToPOBoxFlag')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.ToFax is not None: + namespaceprefix_ = self.ToFax_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFax_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFax>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFax), input_name='ToFax')), namespaceprefix_ , eol_)) + if self.ToEmail is not None: + namespaceprefix_ = self.ToEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToEmail), input_name='ToEmail')), namespaceprefix_ , eol_)) + if self.FirstClassMailType is not None: + namespaceprefix_ = self.FirstClassMailType_nsprefix_ + ':' if 
(UseCapturedNS_ and self.FirstClassMailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstClassMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstClassMailType), input_name='FirstClassMailType')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Postage), input_name='Postage')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentTypeOther is not None: + namespaceprefix_ = self.ContentTypeOther_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentTypeOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentTypeOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentTypeOther), input_name='ContentTypeOther')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Comments is not None: + namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber 
is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.LabelDate is not None: + namespaceprefix_ = self.LabelDate_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelDate), input_name='LabelDate')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.EELPFC is not None: + namespaceprefix_ = self.EELPFC_nsprefix_ + ':' if (UseCapturedNS_ and self.EELPFC_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sEELPFC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EELPFC), input_name='EELPFC')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , 
eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if 
self.SenderFirstName is not None: + namespaceprefix_ = self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.SenderEmail is not None: + namespaceprefix_ = self.SenderEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEmail), input_name='SenderEmail')), namespaceprefix_ , eol_)) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if 
(UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RemainingBarcodes), input_name='RemainingBarcodes')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Revision') + ival_ = self.gds_validate_integer(ival_, node, 'Revision') + self.Revision = ival_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName = value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = 
self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 
'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCountry') + value_ = self.gds_validate_string(value_, node, 'ToCountry') + self.ToCountry = value_ + self.ToCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPOBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPOBoxFlag') + value_ = self.gds_validate_string(value_, node, 'ToPOBoxFlag') + self.ToPOBoxFlag = value_ + self.ToPOBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFax': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFax') + value_ = self.gds_validate_string(value_, node, 'ToFax') + self.ToFax = value_ + self.ToFax_nsprefix_ = child_.prefix + elif nodeName_ == 'ToEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToEmail') + value_ = self.gds_validate_string(value_, node, 'ToEmail') + self.ToEmail = value_ + self.ToEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstClassMailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstClassMailType') + value_ = self.gds_validate_string(value_, node, 'FirstClassMailType') + self.FirstClassMailType = value_ + self.FirstClassMailType_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'Postage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Postage') + value_ = self.gds_validate_string(value_, node, 'Postage') + self.Postage = value_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossPounds') + self.GrossPounds = fval_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossOunces') + self.GrossOunces = fval_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = 
value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentTypeOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentTypeOther') + value_ = self.gds_validate_string(value_, node, 'ContentTypeOther') + self.ContentTypeOther = value_ + self.ContentTypeOther_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Comments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Comments') + value_ = self.gds_validate_string(value_, node, 'Comments') + self.Comments = value_ + self.Comments_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelDate') + value_ = self.gds_validate_string(value_, node, 'LabelDate') + self.LabelDate = value_ + self.LabelDate_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'EELPFC': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'EELPFC') + value_ = self.gds_validate_string(value_, node, 'EELPFC') + self.EELPFC = value_ + self.EELPFC_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEmail') + value_ = self.gds_validate_string(value_, node, 'SenderEmail') + self.SenderEmail = value_ + self.SenderEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'RemainingBarcodes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RemainingBarcodes') + value_ = self.gds_validate_string(value_, node, 'RemainingBarcodes') + self.RemainingBarcodes = value_ + self.RemainingBarcodes_nsprefix_ = child_.prefix + elif nodeName_ == 'ChargebackCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ChargebackCode') + value_ = self.gds_validate_string(value_, node, 'ChargebackCode') + self.ChargebackCode = value_ + self.ChargebackCode_nsprefix_ = child_.prefix +# end class eVSFirstClassMailIntlRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def has__content(self): + if ( + self.ImageParameter is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, 
Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not 
None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s</%sNetPounds>%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s</%sNetOunces>%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s</%sHSTariffNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s</%sCountryOfOrigin>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Value') + fval_ = self.gds_validate_decimal(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ +
self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % 
(namespaceprefix_ , self.gds_format_integer(ExtraService_, input_name='ExtraService'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ExtraService') + ival_ = self.gds_validate_integer(ival_, node, 'ExtraService') + self.ExtraService.append(ival_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = 
rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_first_class_mail_intl_request import *\n\n') + sys.stdout.write('import evs_first_class_mail_intl_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServicesType", + "ImageParametersType", + "ItemDetailType", + "ShippingContentsType", + "eVSFirstClassMailIntlRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_response.py b/modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_response.py new file mode 100644 index 0000000000..7208a2b048 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_first_class_mail_intl_response.py @@ -0,0 +1,1746 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:48 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_first_class_mail_intl_response.py') +# +# Command line arguments: +# ./schemas/eVSFirstClassMailIntlResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_first_class_mail_intl_response.py" ./schemas/eVSFirstClassMailIntlResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
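+ # A minimal illustrative sketch (the ZIP-style patterns and sample values below are hypothetical, not taken from a USPS schema): + #     patterns = [[r'^\d{5}$', r'^\d{5}-\d{4}$']] + #     self.gds_validate_simple_patterns(patterns, '20260')       # -> True + #     self.gds_validate_simple_patterns(patterns, '20260-1234')  # -> True + #     self.gds_validate_simple_patterns(patterns, '2026')        # -> False + # Each inner list is one constraint group; the stringified target must fully match at least one pattern in every group.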
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
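+ # Illustrative example (element name and content are hypothetical): for a node parsed from '<ExtraComment>handle with care</ExtraComment>' this returns the same markup back as a unicode string via etree_.tostring(node, encoding="unicode").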
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
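+ # Rough behavioural sketch (the sample text is illustrative): + #     quote_xml('Fragile & <1kg>')   # -> 'Fragile &amp; &lt;1kg&gt;' + # while, per the docstring, any text matched by CDATA_pattern_ is passed through unescaped.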
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
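+ # Illustrative example (the tag name and text are hypothetical): a text chunk built as MixedContainer(MixedContainer.CategoryText, MixedContainer.TypeNone, '', 'see enclosed form') is written out verbatim here, while whitespace-only text is skipped.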
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSFirstClassMailIntlResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, TotalValue=None, BarcodeNumber=None, LabelImage=None, Page2Image=None, Page3Image=None, Prohibitions=None, Restrictions=None, Observations=None, Regulations=None, AdditionalRestrictions=None, ExtraServices=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.TotalValue = TotalValue + self.TotalValue_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.Page2Image = Page2Image + self.Page2Image_nsprefix_ = None + self.Page3Image = Page3Image + self.Page3Image_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.Regulations = Regulations + self.Regulations_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSFirstClassMailIntlResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSFirstClassMailIntlResponse.subclass: + return eVSFirstClassMailIntlResponse.subclass(*args_, **kwargs_) + else: + return eVSFirstClassMailIntlResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_TotalValue(self): + return self.TotalValue + def set_TotalValue(self, TotalValue): + self.TotalValue = TotalValue + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def 
get_Page2Image(self): + return self.Page2Image + def set_Page2Image(self, Page2Image): + self.Page2Image = Page2Image + def get_Page3Image(self): + return self.Page3Image + def set_Page3Image(self, Page3Image): + self.Page3Image = Page3Image + def get_Prohibitions(self): + return self.Prohibitions + def set_Prohibitions(self, Prohibitions): + self.Prohibitions = Prohibitions + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def get_Observations(self): + return self.Observations + def set_Observations(self, Observations): + self.Observations = Observations + def get_Regulations(self): + return self.Regulations + def set_Regulations(self, Regulations): + self.Regulations = Regulations + def get_AdditionalRestrictions(self): + return self.AdditionalRestrictions + def set_AdditionalRestrictions(self, AdditionalRestrictions): + self.AdditionalRestrictions = AdditionalRestrictions + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def has__content(self): + if ( + self.Postage is not None or + self.TotalValue is not None or + self.BarcodeNumber is not None or + self.LabelImage is not None or + self.Page2Image is not None or + self.Page3Image is not None or + self.Prohibitions is not None or + self.Restrictions is not None or + self.Observations is not None or + self.Regulations is not None or + self.AdditionalRestrictions is not None or + self.ExtraServices is not None or + self.RemainingBarcodes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSFirstClassMailIntlResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSFirstClassMailIntlResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSFirstClassMailIntlResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSFirstClassMailIntlResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSFirstClassMailIntlResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % 
(namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.TotalValue is not None: + namespaceprefix_ = self.TotalValue_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotalValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.TotalValue, input_name='TotalValue'), namespaceprefix_ , eol_)) + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.Page2Image is not None: + namespaceprefix_ = self.Page2Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page2Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage2Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page2Image), input_name='Page2Image')), namespaceprefix_ , eol_)) + if self.Page3Image is not None: + namespaceprefix_ = self.Page3Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page3Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage3Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page3Image), input_name='Page3Image')), namespaceprefix_ , eol_)) + if self.Prohibitions is not None: + namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProhibitions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_)) + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + if self.Observations is not None: + namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sObservations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_)) + if self.Regulations is not None: + namespaceprefix_ = self.Regulations_nsprefix_ + ':' if (UseCapturedNS_ and self.Regulations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRegulations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Regulations), input_name='Regulations')), namespaceprefix_ , eol_)) + if self.AdditionalRestrictions is not None: + namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if 
(UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.RemainingBarcodes, input_name='RemainingBarcodes'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'TotalValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'TotalValue') + fval_ = self.gds_validate_float(fval_, node, 'TotalValue') + self.TotalValue = fval_ + self.TotalValue_nsprefix_ = child_.prefix + elif nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'Page2Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page2Image') + value_ = self.gds_validate_string(value_, node, 'Page2Image') + self.Page2Image = value_ + self.Page2Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page3Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page3Image') + value_ = self.gds_validate_string(value_, node, 'Page3Image') + self.Page3Image = value_ + self.Page3Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Prohibitions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Prohibitions') + value_ = self.gds_validate_string(value_, node, 'Prohibitions') + self.Prohibitions = value_ + self.Prohibitions_nsprefix_ = child_.prefix + elif nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, 
node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'Regulations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Regulations') + value_ = self.gds_validate_string(value_, node, 'Regulations') + self.Regulations = value_ + self.Regulations_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'RemainingBarcodes' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'RemainingBarcodes') + ival_ = self.gds_validate_integer(ival_, node, 'RemainingBarcodes') + self.RemainingBarcodes = ival_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSFirstClassMailIntlResponse + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + obj_ = ExtraServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraService.append(obj_) + obj_.original_tagname_ = 'ExtraService' +# end class ExtraServicesType + + +class ExtraServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Price=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Price = Price + self.Price_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServiceType.subclass: + return ExtraServiceType.subclass(*args_, **kwargs_) + else: + return ExtraServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is 
not None or + self.Price is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ServiceID') + ival_ = self.gds_validate_integer(ival_, node, 'ServiceID') + self.ServiceID = ival_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, 
node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
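+ # Sketch (the file name is hypothetical): with SaveElementTreeNode left True, every built object keeps a reference to its source lxml node, e.g. + #     rootObj = parse('evs_response.xml', silence=True) + #     rootObj.gds_elementtree_node_.sourceline  # source line of the root element + # The doc/rootNode references are only dropped below when SaveElementTreeNode is False.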
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_first_class_mail_intl_response import *\n\n') + sys.stdout.write('import evs_first_class_mail_intl_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
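+# A minimal usage sketch for this generated module (the XML values below are
+# illustrative, not taken from USPS documentation):
+#
+#     import evs_first_class_mail_intl_response as model_
+#
+#     xml = (
+#         '<eVSFirstClassMailIntlResponse>'
+#         '<Postage>14.25</Postage>'
+#         '<BarcodeNumber>LZ000000000US</BarcodeNumber>'
+#         '<RemainingBarcodes>9</RemainingBarcodes>'
+#         '</eVSFirstClassMailIntlResponse>'
+#     )
+#     obj = model_.parseString(xml, silence=True)
+#     obj.get_Postage()            # -> 14.25
+#     obj.get_BarcodeNumber()      # -> 'LZ000000000US'
+#     obj.get_RemainingBarcodes()  # -> 9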
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServiceType", + "ExtraServicesType", + "eVSFirstClassMailIntlResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_request.py b/modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_request.py new file mode 100644 index 0000000000..df61d0c535 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_request.py @@ -0,0 +1,3352 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:49 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_gxg_get_label_request.py') +# +# Command line arguments: +# ./schemas/eVSGXGGetLabelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_gxg_get_label_request.py" ./schemas/eVSGXGGetLabelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
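+#
+# The override hooks imported near the top of this generated module
+# (generatedsnamespaces, generatedscollector, generatedssuper) only need to
+# expose the same names and methods as the defaults defined above. As a
+# minimal sketch, a drop-in "generatedscollector.py" module could look like
+# the following (illustrative only -- the logging call is an assumption and
+# is not part of the generated code):
+#
+#     # File: generatedscollector.py
+#     import logging
+#
+#     class GdsCollector(object):
+#         def __init__(self, messages=None):
+#             self.messages = [] if messages is None else messages
+#         def add_message(self, msg):
+#             # Keep the message and also surface it through logging.
+#             self.messages.append(msg)
+#             logging.getLogger("generateDS").warning(msg)
+#         def get_messages(self):
+#             return self.messages
+#         def clear_messages(self):
+#             self.messages = []
+#         def print_messages(self):
+#             for msg in self.messages:
+#                 print("Warning: {}".format(msg))
+#         def write_messages(self, outstream):
+#             for msg in self.messages:
+#                 outstream.write("Warning: {}\n".format(msg))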
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write(' ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+            optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSGXGGetLabelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZIP5=None, FromZIP4=None, FromPhone=None, ShipFromZIP=None, SenderEMail=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToPostalCode=None, ToPhone=None, RecipientEMail=None, ToDPID=None, ToProvince=None, ToTaxID=None, Container=None, ContentType=None, ShippingContents=None, PurposeOfShipment=None, PartiesToTransaction=None, Agreement=None, Postage=None, InsuredValue=None, GrossPounds=None, GrossOunces=None, Length=None, Width=None, Height=None, Girth=None, Shape=None, CIRequired=None, InvoiceDate=None, InvoiceNumber=None, CustomerOrderNumber=None, CustOrderNumber=None, TermsDelivery=None, TermsDeliveryOther=None, PackingCost=None, CountryUltDest=None, CIAgreement=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, ShipDate=None, HoldForManifest=None, PriceOptions=None, CommercialShipment=None, BuyerFirstName=None, BuyerLastName=None, BuyerAddress1=None, BuyerAddress2=None, BuyerAddress3=None, BuyerCity=None, BuyerState=None, BuyerPostalCode=None, BuyerCountry=None, BuyerTaxID=None, BuyerRecipient=None, TermsPayment=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, OverrideMID=None, ChargebackCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + self.FromMiddleInitial = FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + 
self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZIP5 = FromZIP5 + self.FromZIP5_nsprefix_ = None + self.FromZIP4 = FromZIP4 + self.FromZIP4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.ShipFromZIP = ShipFromZIP + self.ShipFromZIP_nsprefix_ = None + self.SenderEMail = SenderEMail + self.SenderEMail_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.RecipientEMail = RecipientEMail + self.RecipientEMail_nsprefix_ = None + self.ToDPID = ToDPID + self.ToDPID_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToTaxID = ToTaxID + self.ToTaxID_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.PurposeOfShipment = PurposeOfShipment + self.PurposeOfShipment_nsprefix_ = None + self.PartiesToTransaction = PartiesToTransaction + self.PartiesToTransaction_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.InsuredValue = InsuredValue + self.InsuredValue_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Shape = Shape + self.Shape_nsprefix_ = None + self.CIRequired = CIRequired + self.CIRequired_nsprefix_ = None + self.InvoiceDate = InvoiceDate + self.InvoiceDate_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + self.CustomerOrderNumber = CustomerOrderNumber + self.CustomerOrderNumber_nsprefix_ = None + self.CustOrderNumber = CustOrderNumber + self.CustOrderNumber_nsprefix_ = None + self.TermsDelivery = TermsDelivery + self.TermsDelivery_nsprefix_ = None + self.TermsDeliveryOther = TermsDeliveryOther + self.TermsDeliveryOther_nsprefix_ = None + self.PackingCost = PackingCost + self.PackingCost_nsprefix_ = None + self.CountryUltDest = CountryUltDest + self.CountryUltDest_nsprefix_ = None + self.CIAgreement = CIAgreement + self.CIAgreement_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.ShipDate = ShipDate + self.ShipDate_nsprefix_ = None + 
self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.CommercialShipment = CommercialShipment + self.CommercialShipment_nsprefix_ = None + self.BuyerFirstName = BuyerFirstName + self.BuyerFirstName_nsprefix_ = None + self.BuyerLastName = BuyerLastName + self.BuyerLastName_nsprefix_ = None + self.BuyerAddress1 = BuyerAddress1 + self.BuyerAddress1_nsprefix_ = None + self.BuyerAddress2 = BuyerAddress2 + self.BuyerAddress2_nsprefix_ = None + self.BuyerAddress3 = BuyerAddress3 + self.BuyerAddress3_nsprefix_ = None + self.BuyerCity = BuyerCity + self.BuyerCity_nsprefix_ = None + self.BuyerState = BuyerState + self.BuyerState_nsprefix_ = None + self.BuyerPostalCode = BuyerPostalCode + self.BuyerPostalCode_nsprefix_ = None + self.BuyerCountry = BuyerCountry + self.BuyerCountry_nsprefix_ = None + self.BuyerTaxID = BuyerTaxID + self.BuyerTaxID_nsprefix_ = None + self.BuyerRecipient = BuyerRecipient + self.BuyerRecipient_nsprefix_ = None + self.TermsPayment = TermsPayment + self.TermsPayment_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.OverrideMID = OverrideMID + self.OverrideMID_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSGXGGetLabelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSGXGGetLabelRequest.subclass: + return eVSGXGGetLabelRequest.subclass(*args_, **kwargs_) + else: + return eVSGXGGetLabelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def 
get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZIP5(self): + return self.FromZIP5 + def set_FromZIP5(self, FromZIP5): + self.FromZIP5 = FromZIP5 + def get_FromZIP4(self): + return self.FromZIP4 + def set_FromZIP4(self, FromZIP4): + self.FromZIP4 = FromZIP4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_ShipFromZIP(self): + return self.ShipFromZIP + def set_ShipFromZIP(self, ShipFromZIP): + self.ShipFromZIP = ShipFromZIP + def get_SenderEMail(self): + return self.SenderEMail + def set_SenderEMail(self, SenderEMail): + self.SenderEMail = SenderEMail + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_RecipientEMail(self): + return self.RecipientEMail + def set_RecipientEMail(self, RecipientEMail): + self.RecipientEMail = RecipientEMail + def get_ToDPID(self): + return self.ToDPID + def set_ToDPID(self, ToDPID): + self.ToDPID = ToDPID + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToTaxID(self): + return self.ToTaxID + def set_ToTaxID(self, ToTaxID): + self.ToTaxID = ToTaxID + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_PurposeOfShipment(self): + return self.PurposeOfShipment + def set_PurposeOfShipment(self, PurposeOfShipment): + self.PurposeOfShipment = PurposeOfShipment + def get_PartiesToTransaction(self): + return self.PartiesToTransaction + def set_PartiesToTransaction(self, PartiesToTransaction): + self.PartiesToTransaction = PartiesToTransaction + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + 
self.Agreement = Agreement + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_InsuredValue(self): + return self.InsuredValue + def set_InsuredValue(self, InsuredValue): + self.InsuredValue = InsuredValue + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Shape(self): + return self.Shape + def set_Shape(self, Shape): + self.Shape = Shape + def get_CIRequired(self): + return self.CIRequired + def set_CIRequired(self, CIRequired): + self.CIRequired = CIRequired + def get_InvoiceDate(self): + return self.InvoiceDate + def set_InvoiceDate(self, InvoiceDate): + self.InvoiceDate = InvoiceDate + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_CustomerOrderNumber(self): + return self.CustomerOrderNumber + def set_CustomerOrderNumber(self, CustomerOrderNumber): + self.CustomerOrderNumber = CustomerOrderNumber + def get_CustOrderNumber(self): + return self.CustOrderNumber + def set_CustOrderNumber(self, CustOrderNumber): + self.CustOrderNumber = CustOrderNumber + def get_TermsDelivery(self): + return self.TermsDelivery + def set_TermsDelivery(self, TermsDelivery): + self.TermsDelivery = TermsDelivery + def get_TermsDeliveryOther(self): + return self.TermsDeliveryOther + def set_TermsDeliveryOther(self, TermsDeliveryOther): + self.TermsDeliveryOther = TermsDeliveryOther + def get_PackingCost(self): + return self.PackingCost + def set_PackingCost(self, PackingCost): + self.PackingCost = PackingCost + def get_CountryUltDest(self): + return self.CountryUltDest + def set_CountryUltDest(self, CountryUltDest): + self.CountryUltDest = CountryUltDest + def get_CIAgreement(self): + return self.CIAgreement + def set_CIAgreement(self, CIAgreement): + self.CIAgreement = CIAgreement + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_CommercialShipment(self): + return self.CommercialShipment + def set_CommercialShipment(self, CommercialShipment): + self.CommercialShipment = 
CommercialShipment + def get_BuyerFirstName(self): + return self.BuyerFirstName + def set_BuyerFirstName(self, BuyerFirstName): + self.BuyerFirstName = BuyerFirstName + def get_BuyerLastName(self): + return self.BuyerLastName + def set_BuyerLastName(self, BuyerLastName): + self.BuyerLastName = BuyerLastName + def get_BuyerAddress1(self): + return self.BuyerAddress1 + def set_BuyerAddress1(self, BuyerAddress1): + self.BuyerAddress1 = BuyerAddress1 + def get_BuyerAddress2(self): + return self.BuyerAddress2 + def set_BuyerAddress2(self, BuyerAddress2): + self.BuyerAddress2 = BuyerAddress2 + def get_BuyerAddress3(self): + return self.BuyerAddress3 + def set_BuyerAddress3(self, BuyerAddress3): + self.BuyerAddress3 = BuyerAddress3 + def get_BuyerCity(self): + return self.BuyerCity + def set_BuyerCity(self, BuyerCity): + self.BuyerCity = BuyerCity + def get_BuyerState(self): + return self.BuyerState + def set_BuyerState(self, BuyerState): + self.BuyerState = BuyerState + def get_BuyerPostalCode(self): + return self.BuyerPostalCode + def set_BuyerPostalCode(self, BuyerPostalCode): + self.BuyerPostalCode = BuyerPostalCode + def get_BuyerCountry(self): + return self.BuyerCountry + def set_BuyerCountry(self, BuyerCountry): + self.BuyerCountry = BuyerCountry + def get_BuyerTaxID(self): + return self.BuyerTaxID + def set_BuyerTaxID(self, BuyerTaxID): + self.BuyerTaxID = BuyerTaxID + def get_BuyerRecipient(self): + return self.BuyerRecipient + def set_BuyerRecipient(self, BuyerRecipient): + self.BuyerRecipient = BuyerRecipient + def get_TermsPayment(self): + return self.TermsPayment + def set_TermsPayment(self, TermsPayment): + self.TermsPayment = TermsPayment + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_OverrideMID(self): + return self.OverrideMID + def set_OverrideMID(self, OverrideMID): + self.OverrideMID = OverrideMID + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_USERID(self): + return 
self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZIP5 is not None or + self.FromZIP4 is not None or + self.FromPhone is not None or + self.ShipFromZIP is not None or + self.SenderEMail is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToPostalCode is not None or + self.ToPhone is not None or + self.RecipientEMail is not None or + self.ToDPID is not None or + self.ToProvince is not None or + self.ToTaxID is not None or + self.Container is not None or + self.ContentType is not None or + self.ShippingContents is not None or + self.PurposeOfShipment is not None or + self.PartiesToTransaction is not None or + self.Agreement is not None or + self.Postage is not None or + self.InsuredValue is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.Length is not None or + self.Width is not None or + self.Height is not None or + self.Girth is not None or + self.Shape is not None or + self.CIRequired is not None or + self.InvoiceDate is not None or + self.InvoiceNumber is not None or + self.CustomerOrderNumber is not None or + self.CustOrderNumber is not None or + self.TermsDelivery is not None or + self.TermsDeliveryOther is not None or + self.PackingCost is not None or + self.CountryUltDest is not None or + self.CIAgreement is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.ShipDate is not None or + self.HoldForManifest is not None or + self.PriceOptions is not None or + self.CommercialShipment is not None or + self.BuyerFirstName is not None or + self.BuyerLastName is not None or + self.BuyerAddress1 is not None or + self.BuyerAddress2 is not None or + self.BuyerAddress3 is not None or + self.BuyerCity is not None or + self.BuyerState is not None or + self.BuyerPostalCode is not None or + self.BuyerCountry is not None or + self.BuyerTaxID is not None or + self.BuyerRecipient is not None or + self.TermsPayment is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.OverrideMID is not None or + self.ChargebackCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSGXGGetLabelRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + 
else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSGXGGetLabelRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSGXGGetLabelRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSGXGGetLabelRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSGXGGetLabelRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromFirstName is not None: + namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_)) + if self.FromMiddleInitial is not None: + namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromMiddleInitial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_)) + if self.FromLastName is not None: + namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else 
'' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZIP5 is not None: + namespaceprefix_ = self.FromZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZIP5), input_name='FromZIP5')), namespaceprefix_ , eol_)) + if self.FromZIP4 is not None: + namespaceprefix_ = self.FromZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZIP4), input_name='FromZIP4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if 
self.ShipFromZIP is not None: + namespaceprefix_ = self.ShipFromZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipFromZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipFromZIP>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ShipFromZIP), input_name='ShipFromZIP')), namespaceprefix_ , eol_)) + if self.SenderEMail is not None: + namespaceprefix_ = self.SenderEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEMail), input_name='SenderEMail')), namespaceprefix_ , eol_)) + if self.ToFirstName is not None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.RecipientEMail is not None: + namespaceprefix_ = self.RecipientEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.RecipientEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRecipientEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RecipientEMail), input_name='RecipientEMail')), namespaceprefix_ , eol_)) + if self.ToDPID is not None: + namespaceprefix_ = self.ToDPID_nsprefix_ + ':' if (UseCapturedNS_ and self.ToDPID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToDPID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToDPID), input_name='ToDPID')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToTaxID is not None: + namespaceprefix_ = self.ToTaxID_nsprefix_ + ':' if (UseCapturedNS_ and self.ToTaxID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToTaxID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToTaxID), input_name='ToTaxID')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.PurposeOfShipment is not None: + namespaceprefix_ = self.PurposeOfShipment_nsprefix_ + ':' if (UseCapturedNS_ and self.PurposeOfShipment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPurposeOfShipment>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PurposeOfShipment), input_name='PurposeOfShipment')), namespaceprefix_ , eol_)) + if self.PartiesToTransaction is not None: + namespaceprefix_ = self.PartiesToTransaction_nsprefix_ + ':' if (UseCapturedNS_ and self.PartiesToTransaction_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPartiesToTransaction>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PartiesToTransaction), input_name='PartiesToTransaction')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and 
self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Postage), input_name='Postage')), namespaceprefix_ , eol_)) + if self.InsuredValue is not None: + namespaceprefix_ = self.InsuredValue_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredValue), input_name='InsuredValue')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Shape is not None: + namespaceprefix_ = self.Shape_nsprefix_ + ':' if (UseCapturedNS_ and self.Shape_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShape>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Shape), input_name='Shape')), namespaceprefix_ , eol_)) + if self.CIRequired is not None: + namespaceprefix_ = self.CIRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.CIRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIRequired>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.CIRequired, 
input_name='CIRequired'), namespaceprefix_ , eol_)) + if self.InvoiceDate is not None: + namespaceprefix_ = self.InvoiceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceDate), input_name='InvoiceDate')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.CustomerOrderNumber is not None: + namespaceprefix_ = self.CustomerOrderNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerOrderNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerOrderNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerOrderNumber), input_name='CustomerOrderNumber')), namespaceprefix_ , eol_)) + if self.CustOrderNumber is not None: + namespaceprefix_ = self.CustOrderNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CustOrderNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustOrderNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustOrderNumber), input_name='CustOrderNumber')), namespaceprefix_ , eol_)) + if self.TermsDelivery is not None: + namespaceprefix_ = self.TermsDelivery_nsprefix_ + ':' if (UseCapturedNS_ and self.TermsDelivery_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTermsDelivery>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TermsDelivery), input_name='TermsDelivery')), namespaceprefix_ , eol_)) + if self.TermsDeliveryOther is not None: + namespaceprefix_ = self.TermsDeliveryOther_nsprefix_ + ':' if (UseCapturedNS_ and self.TermsDeliveryOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTermsDeliveryOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TermsDeliveryOther), input_name='TermsDeliveryOther')), namespaceprefix_ , eol_)) + if self.PackingCost is not None: + namespaceprefix_ = self.PackingCost_nsprefix_ + ':' if (UseCapturedNS_ and self.PackingCost_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackingCost>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.PackingCost, input_name='PackingCost'), namespaceprefix_ , eol_)) + if self.CountryUltDest is not None: + namespaceprefix_ = self.CountryUltDest_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryUltDest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryUltDest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryUltDest), input_name='CountryUltDest')), namespaceprefix_ , eol_)) + if self.CIAgreement is not None: + namespaceprefix_ = self.CIAgreement_nsprefix_ + ':' if (UseCapturedNS_ and self.CIAgreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIAgreement), input_name='CIAgreement')), namespaceprefix_ , eol_)) + if 
self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.ShipDate is not None: + namespaceprefix_ = self.ShipDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ShipDate), input_name='ShipDate')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.CommercialShipment is not None: + namespaceprefix_ = self.CommercialShipment_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialShipment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialShipment>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.CommercialShipment, input_name='CommercialShipment'), namespaceprefix_ , eol_)) + if self.BuyerFirstName is not None: + namespaceprefix_ = self.BuyerFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerFirstName), input_name='BuyerFirstName')), namespaceprefix_ , eol_)) + if self.BuyerLastName is not None: + namespaceprefix_ = self.BuyerLastName_nsprefix_ + ':' if (UseCapturedNS_ and 
self.BuyerLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerLastName), input_name='BuyerLastName')), namespaceprefix_ , eol_)) + if self.BuyerAddress1 is not None: + namespaceprefix_ = self.BuyerAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerAddress1), input_name='BuyerAddress1')), namespaceprefix_ , eol_)) + if self.BuyerAddress2 is not None: + namespaceprefix_ = self.BuyerAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerAddress2), input_name='BuyerAddress2')), namespaceprefix_ , eol_)) + if self.BuyerAddress3 is not None: + namespaceprefix_ = self.BuyerAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerAddress3), input_name='BuyerAddress3')), namespaceprefix_ , eol_)) + if self.BuyerCity is not None: + namespaceprefix_ = self.BuyerCity_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerCity), input_name='BuyerCity')), namespaceprefix_ , eol_)) + if self.BuyerState is not None: + namespaceprefix_ = self.BuyerState_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerState), input_name='BuyerState')), namespaceprefix_ , eol_)) + if self.BuyerPostalCode is not None: + namespaceprefix_ = self.BuyerPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerPostalCode), input_name='BuyerPostalCode')), namespaceprefix_ , eol_)) + if self.BuyerCountry is not None: + namespaceprefix_ = self.BuyerCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerCountry), input_name='BuyerCountry')), namespaceprefix_ , eol_)) + if self.BuyerTaxID is not None: + namespaceprefix_ = self.BuyerTaxID_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerTaxID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerTaxID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerTaxID), input_name='BuyerTaxID')), namespaceprefix_ , eol_)) + if self.BuyerRecipient is not None: + namespaceprefix_ = self.BuyerRecipient_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerRecipient_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerRecipient>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerRecipient), input_name='BuyerRecipient')), namespaceprefix_ , eol_)) + if self.TermsPayment is not None: + namespaceprefix_ = self.TermsPayment_nsprefix_ + ':' if (UseCapturedNS_ and self.TermsPayment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTermsPayment>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TermsPayment), input_name='TermsPayment')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if 
self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.OverrideMID is not None: + namespaceprefix_ = self.OverrideMID_nsprefix_ + ':' if (UseCapturedNS_ and self.OverrideMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOverrideMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OverrideMID), input_name='OverrideMID')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif 
nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName = value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZIP5') + value_ = self.gds_validate_string(value_, node, 'FromZIP5') + self.FromZIP5 = value_ + self.FromZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZIP4') + value_ = self.gds_validate_string(value_, node, 'FromZIP4') + self.FromZIP4 = value_ + self.FromZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ShipFromZIP': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ShipFromZIP') + value_ = self.gds_validate_string(value_, node, 'ShipFromZIP') + self.ShipFromZIP = value_ + self.ShipFromZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEMail') + value_ = self.gds_validate_string(value_, node, 'SenderEMail') + self.SenderEMail = 
value_ + self.SenderEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'RecipientEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RecipientEMail') + value_ = self.gds_validate_string(value_, node, 'RecipientEMail') + self.RecipientEMail = value_ + self.RecipientEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'ToDPID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToDPID') + value_ = self.gds_validate_string(value_, node, 'ToDPID') + self.ToDPID = value_ + self.ToDPID_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToTaxID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToTaxID') + value_ = self.gds_validate_string(value_, node, 'ToTaxID') + self.ToTaxID = value_ + self.ToTaxID_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = 
child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'PurposeOfShipment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PurposeOfShipment') + value_ = self.gds_validate_string(value_, node, 'PurposeOfShipment') + self.PurposeOfShipment = value_ + self.PurposeOfShipment_nsprefix_ = child_.prefix + elif nodeName_ == 'PartiesToTransaction': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PartiesToTransaction') + value_ = self.gds_validate_string(value_, node, 'PartiesToTransaction') + self.PartiesToTransaction = value_ + self.PartiesToTransaction_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Postage') + value_ = self.gds_validate_string(value_, node, 'Postage') + self.Postage = value_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredValue') + value_ = self.gds_validate_string(value_, node, 'InsuredValue') + self.InsuredValue = value_ + self.InsuredValue_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossPounds') + self.GrossPounds = fval_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossOunces') + self.GrossOunces = fval_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Shape': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Shape') + value_ = self.gds_validate_string(value_, node, 'Shape') + self.Shape = value_ + self.Shape_nsprefix_ = child_.prefix + elif nodeName_ == 'CIRequired': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'CIRequired') + ival_ = self.gds_validate_boolean(ival_, node, 'CIRequired') + self.CIRequired = ival_ + self.CIRequired_nsprefix_ = 
child_.prefix + elif nodeName_ == 'InvoiceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceDate') + value_ = self.gds_validate_string(value_, node, 'InvoiceDate') + self.InvoiceDate = value_ + self.InvoiceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerOrderNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerOrderNumber') + value_ = self.gds_validate_string(value_, node, 'CustomerOrderNumber') + self.CustomerOrderNumber = value_ + self.CustomerOrderNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CustOrderNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustOrderNumber') + value_ = self.gds_validate_string(value_, node, 'CustOrderNumber') + self.CustOrderNumber = value_ + self.CustOrderNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'TermsDelivery': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TermsDelivery') + value_ = self.gds_validate_string(value_, node, 'TermsDelivery') + self.TermsDelivery = value_ + self.TermsDelivery_nsprefix_ = child_.prefix + elif nodeName_ == 'TermsDeliveryOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TermsDeliveryOther') + value_ = self.gds_validate_string(value_, node, 'TermsDeliveryOther') + self.TermsDeliveryOther = value_ + self.TermsDeliveryOther_nsprefix_ = child_.prefix + elif nodeName_ == 'PackingCost' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'PackingCost') + fval_ = self.gds_validate_decimal(fval_, node, 'PackingCost') + self.PackingCost = fval_ + self.PackingCost_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryUltDest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryUltDest') + value_ = self.gds_validate_string(value_, node, 'CountryUltDest') + self.CountryUltDest = value_ + self.CountryUltDest_nsprefix_ = child_.prefix + elif nodeName_ == 'CIAgreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIAgreement') + value_ = self.gds_validate_string(value_, node, 'CIAgreement') + self.CIAgreement = value_ + self.CIAgreement_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 
'ShipDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ShipDate') + value_ = self.gds_validate_string(value_, node, 'ShipDate') + self.ShipDate = value_ + self.ShipDate_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialShipment': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'CommercialShipment') + ival_ = self.gds_validate_boolean(ival_, node, 'CommercialShipment') + self.CommercialShipment = ival_ + self.CommercialShipment_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerFirstName') + value_ = self.gds_validate_string(value_, node, 'BuyerFirstName') + self.BuyerFirstName = value_ + self.BuyerFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerLastName') + value_ = self.gds_validate_string(value_, node, 'BuyerLastName') + self.BuyerLastName = value_ + self.BuyerLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerAddress1') + value_ = self.gds_validate_string(value_, node, 'BuyerAddress1') + self.BuyerAddress1 = value_ + self.BuyerAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerAddress2') + value_ = self.gds_validate_string(value_, node, 'BuyerAddress2') + self.BuyerAddress2 = value_ + self.BuyerAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerAddress3') + value_ = self.gds_validate_string(value_, node, 'BuyerAddress3') + self.BuyerAddress3 = value_ + self.BuyerAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerCity') + value_ = self.gds_validate_string(value_, node, 'BuyerCity') + self.BuyerCity = value_ + self.BuyerCity_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerState') + value_ = self.gds_validate_string(value_, node, 'BuyerState') + self.BuyerState = value_ + self.BuyerState_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerPostalCode') + value_ = self.gds_validate_string(value_, node, 'BuyerPostalCode') + self.BuyerPostalCode = value_ + self.BuyerPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerCountry') + value_ = self.gds_validate_string(value_, node, 'BuyerCountry') + self.BuyerCountry = value_ + self.BuyerCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerTaxID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'BuyerTaxID') + value_ = self.gds_validate_string(value_, node, 'BuyerTaxID') + self.BuyerTaxID = value_ + self.BuyerTaxID_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerRecipient': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerRecipient') + value_ = self.gds_validate_string(value_, node, 'BuyerRecipient') + self.BuyerRecipient = value_ + self.BuyerRecipient_nsprefix_ = child_.prefix + elif nodeName_ == 'TermsPayment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TermsPayment') + value_ = self.gds_validate_string(value_, node, 'TermsPayment') + self.TermsPayment = value_ + self.TermsPayment_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + 
+            self.VendorProductVersionNumber = value_
+            self.VendorProductVersionNumber_nsprefix_ = child_.prefix
+        elif nodeName_ == 'OverrideMID':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'OverrideMID')
+            value_ = self.gds_validate_string(value_, node, 'OverrideMID')
+            self.OverrideMID = value_
+            self.OverrideMID_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ChargebackCode':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ChargebackCode')
+            value_ = self.gds_validate_string(value_, node, 'ChargebackCode')
+            self.ChargebackCode = value_
+            self.ChargebackCode_nsprefix_ = child_.prefix
+# end class eVSGXGGetLabelRequest
+
+
+class ImageParametersType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        self.ImageParameter = ImageParameter
+        self.ImageParameter_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, ImageParametersType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if ImageParametersType.subclass:
+            return ImageParametersType.subclass(*args_, **kwargs_)
+        else:
+            return ImageParametersType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_ImageParameter(self):
+        return self.ImageParameter
+    def set_ImageParameter(self, ImageParameter):
+        self.ImageParameter = ImageParameter
+    def has__content(self):
+        if (
+            self.ImageParameter is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'ImageParametersType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.ImageParameter is not None:
+            namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sImageParameter>%s</%sImageParameter>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ImageParameter':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ImageParameter')
+            value_ = self.gds_validate_string(value_, node, 'ImageParameter')
+            self.ImageParameter = value_
+            self.ImageParameter_nsprefix_ = child_.prefix
+# end class ImageParametersType
+
+
+class ShippingContentsType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        if ItemDetail is None:
+            self.ItemDetail = []
+        else:
+            self.ItemDetail = ItemDetail
+        self.ItemDetail_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, ShippingContentsType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if ShippingContentsType.subclass:
+            return ShippingContentsType.subclass(*args_, **kwargs_)
+        else:
+            return ShippingContentsType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_ItemDetail(self):
+        return self.ItemDetail
+    def set_ItemDetail(self, ItemDetail):
+        self.ItemDetail = ItemDetail
+    def add_ItemDetail(self, value):
+        self.ItemDetail.append(value)
+    def insert_ItemDetail_at(self, index, value):
+        self.ItemDetail.insert(index, value)
+    def replace_ItemDetail_at(self, index, value):
+        self.ItemDetail[index] = value
+    def has__content(self):
+        if (
+            self.ItemDetail
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'ShippingContentsType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        for ItemDetail_ in self.ItemDetail:
+            namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else ''
+            ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print)
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ItemDetail':
+            obj_ = ItemDetailType.factory(parent_object_=self)
+            obj_.build(child_, gds_collector_=gds_collector_)
+            self.ItemDetail.append(obj_)
+            obj_.original_tagname_ = 'ItemDetail'
+# end class ShippingContentsType
+
+
+class ItemDetailType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, Description=None, Commodity=None, Restriction=None, Quantity=None, UnitValue=None, NetPounds=None, NetOunces=None, UnitOfMeasure=None, HSTariffNumber=None, CountryofManufacture=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        self.Description = Description
+        self.Description_nsprefix_ = None
+        self.Commodity = Commodity
+        self.Commodity_nsprefix_ = None
+        self.Restriction = Restriction
+        self.Restriction_nsprefix_ = None
+        self.Quantity = Quantity
+        self.Quantity_nsprefix_ = None
+        self.UnitValue = UnitValue
+        self.UnitValue_nsprefix_ = None
+        self.NetPounds = NetPounds
+        self.NetPounds_nsprefix_ = None
+        self.NetOunces = NetOunces
+        self.NetOunces_nsprefix_ = None
+        self.UnitOfMeasure = UnitOfMeasure
+        self.UnitOfMeasure_nsprefix_ = None
+        self.HSTariffNumber = HSTariffNumber
+        self.HSTariffNumber_nsprefix_ = None
+        self.CountryofManufacture = CountryofManufacture
+        self.CountryofManufacture_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, ItemDetailType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if ItemDetailType.subclass:
+            return ItemDetailType.subclass(*args_, **kwargs_)
+        else:
+            return ItemDetailType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_Description(self):
+        return self.Description
+    def set_Description(self, Description):
+        self.Description = Description
+    def get_Commodity(self):
return self.Commodity + def set_Commodity(self, Commodity): + self.Commodity = Commodity + def get_Restriction(self): + return self.Restriction + def set_Restriction(self, Restriction): + self.Restriction = Restriction + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_UnitValue(self): + return self.UnitValue + def set_UnitValue(self, UnitValue): + self.UnitValue = UnitValue + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_UnitOfMeasure(self): + return self.UnitOfMeasure + def set_UnitOfMeasure(self, UnitOfMeasure): + self.UnitOfMeasure = UnitOfMeasure + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryofManufacture(self): + return self.CountryofManufacture + def set_CountryofManufacture(self, CountryofManufacture): + self.CountryofManufacture = CountryofManufacture + def has__content(self): + if ( + self.Description is not None or + self.Commodity is not None or + self.Restriction is not None or + self.Quantity is not None or + self.UnitValue is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.UnitOfMeasure is not None or + self.HSTariffNumber is not None or + self.CountryofManufacture is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Commodity is not None: + namespaceprefix_ = self.Commodity_nsprefix_ + ':' if (UseCapturedNS_ and self.Commodity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sCommodity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Commodity), input_name='Commodity')), namespaceprefix_ , eol_)) + if self.Restriction is not None: + namespaceprefix_ = self.Restriction_nsprefix_ + ':' if (UseCapturedNS_ and self.Restriction_nsprefix_) else '' + self.Restriction.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Restriction', pretty_print=pretty_print) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.UnitValue is not None: + namespaceprefix_ = self.UnitValue_nsprefix_ + ':' if (UseCapturedNS_ and self.UnitValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUnitValue>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.UnitValue, input_name='UnitValue'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.UnitOfMeasure is not None: + namespaceprefix_ = self.UnitOfMeasure_nsprefix_ + ':' if (UseCapturedNS_ and self.UnitOfMeasure_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUnitOfMeasure>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.UnitOfMeasure, input_name='UnitOfMeasure'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryofManufacture is not None: + namespaceprefix_ = self.CountryofManufacture_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryofManufacture_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryofManufacture>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryofManufacture), input_name='CountryofManufacture')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, 
gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Commodity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Commodity') + value_ = self.gds_validate_string(value_, node, 'Commodity') + self.Commodity = value_ + self.Commodity_nsprefix_ = child_.prefix + elif nodeName_ == 'Restriction': + obj_ = RestrictionType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Restriction = obj_ + obj_.original_tagname_ = 'Restriction' + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'UnitValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'UnitValue') + fval_ = self.gds_validate_decimal(fval_, node, 'UnitValue') + self.UnitValue = fval_ + self.UnitValue_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'UnitOfMeasure' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'UnitOfMeasure') + fval_ = self.gds_validate_decimal(fval_, node, 'UnitOfMeasure') + self.UnitOfMeasure = fval_ + self.UnitOfMeasure_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryofManufacture': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryofManufacture') + value_ = self.gds_validate_string(value_, node, 'CountryofManufacture') + self.CountryofManufacture = value_ + self.CountryofManufacture_nsprefix_ = child_.prefix +# end class ItemDetailType + + +class RestrictionType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FootnoteNumber=None, Response=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FootnoteNumber = FootnoteNumber + self.FootnoteNumber_nsprefix_ = None + self.Response = Response + self.Response_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RestrictionType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RestrictionType.subclass: + return RestrictionType.subclass(*args_, **kwargs_) + else: + return RestrictionType(*args_, **kwargs_) + factory = 
staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FootnoteNumber(self): + return self.FootnoteNumber + def set_FootnoteNumber(self, FootnoteNumber): + self.FootnoteNumber = FootnoteNumber + def get_Response(self): + return self.Response + def set_Response(self, Response): + self.Response = Response + def has__content(self): + if ( + self.FootnoteNumber is not None or + self.Response is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RestrictionType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'RestrictionType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RestrictionType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RestrictionType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RestrictionType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FootnoteNumber is not None: + namespaceprefix_ = self.FootnoteNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.FootnoteNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFootnoteNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FootnoteNumber), input_name='FootnoteNumber')), namespaceprefix_ , eol_)) + if self.Response is not None: + namespaceprefix_ = self.Response_nsprefix_ + ':' if (UseCapturedNS_ and self.Response_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sResponse>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Response), input_name='Response')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FootnoteNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FootnoteNumber') + value_ = self.gds_validate_string(value_, node, 'FootnoteNumber') + self.FootnoteNumber = value_ + 
self.FootnoteNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'Response': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Response') + value_ = self.gds_validate_string(value_, node, 'Response') + self.Response = value_ + self.Response_nsprefix_ = child_.prefix +# end class RestrictionType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_gxg_get_label_request import *\n\n') + sys.stdout.write('import evs_gxg_get_label_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
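+# Illustrative usage note (a hypothetical sketch, not part of the generateDS
+# output): the module-level helpers above are the usual entry points for
+# turning raw USPS XML into an object tree, for example:
+#
+#     import karrio.schemas.usps.evs_gxg_get_label_request as gxg_request
+#     obj = gxg_request.parseString(xml_text, silence=True, print_warnings=False)
+#     obj.export(sys.stdout, 0, name_='eVSGXGGetLabelRequest')
+#
+# where xml_text is a hypothetical eVSGXGGetLabelRequest document and the
+# import path is assumed to mirror this file's location under karrio/schemas/usps/.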
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ImageParametersType", + "ItemDetailType", + "RestrictionType", + "ShippingContentsType", + "eVSGXGGetLabelRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_response.py b/modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_response.py new file mode 100644 index 0000000000..ca3ee6607a --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_gxg_get_label_response.py @@ -0,0 +1,1694 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:49 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_gxg_get_label_response.py') +# +# Command line arguments: +# ./schemas/eVSGXGGetLabelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_gxg_get_label_response.py" ./schemas/eVSGXGGetLabelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
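+# For instance, a hypothetical local "generatedssuper.py" exposing its own
+# GeneratedsSuper class (e.g. one that adds logging around export) would be
+# picked up by the import below in place of the default implementation; the
+# same pattern applies to the "generatedscollector" hook above.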
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
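+            # For example (illustrative values only):
+            #   patterns = [['[0-9]+', '[A-Z]{2}[0-9]+']]
+            #   'US123' fully matches the second pattern -> True
+            #   '12a' fully matches no pattern -> False
+            # Every inner list must contain at least one pattern that matches
+            # the whole target for the overall result to be True.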
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
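+    # For example, quote_xml('AT&T <intl>') should yield 'AT&amp;T &lt;intl&gt;',
+    # while any <![CDATA[ ... ]]> sections in the input are passed through
+    # unescaped.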
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSGXGGetLabelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, CommodityGuarantee=None, Insurance=None, USPSBarcodeNumber=None, FedExBarcodeNumber=None, LabelImage=None, LabelImagePage2=None, LabelImagePage3=None, LabelImagePage4=None, CIImage=None, CIImagePage2=None, CIImagePage3=None, CIImagePage4=None, InsuranceFee=None, DimensionalWeight=None, LogMessage=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.CommodityGuarantee = CommodityGuarantee + self.CommodityGuarantee_nsprefix_ = None + self.Insurance = Insurance + self.Insurance_nsprefix_ = None + self.USPSBarcodeNumber = USPSBarcodeNumber + self.USPSBarcodeNumber_nsprefix_ = None + self.FedExBarcodeNumber = FedExBarcodeNumber + self.FedExBarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.LabelImagePage2 = LabelImagePage2 + self.LabelImagePage2_nsprefix_ = None + self.LabelImagePage3 = LabelImagePage3 + self.LabelImagePage3_nsprefix_ = None + self.LabelImagePage4 = LabelImagePage4 + self.LabelImagePage4_nsprefix_ = None + self.CIImage = CIImage + self.CIImage_nsprefix_ = None + self.CIImagePage2 = CIImagePage2 + self.CIImagePage2_nsprefix_ = None + self.CIImagePage3 = CIImagePage3 + self.CIImagePage3_nsprefix_ = None + self.CIImagePage4 = CIImagePage4 + self.CIImagePage4_nsprefix_ = None + self.InsuranceFee = InsuranceFee + self.InsuranceFee_nsprefix_ = None + self.DimensionalWeight = DimensionalWeight + self.DimensionalWeight_nsprefix_ = None + self.LogMessage = LogMessage + self.LogMessage_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSGXGGetLabelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSGXGGetLabelResponse.subclass: + return eVSGXGGetLabelResponse.subclass(*args_, **kwargs_) + else: + return eVSGXGGetLabelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def 
get_CommodityGuarantee(self): + return self.CommodityGuarantee + def set_CommodityGuarantee(self, CommodityGuarantee): + self.CommodityGuarantee = CommodityGuarantee + def get_Insurance(self): + return self.Insurance + def set_Insurance(self, Insurance): + self.Insurance = Insurance + def get_USPSBarcodeNumber(self): + return self.USPSBarcodeNumber + def set_USPSBarcodeNumber(self, USPSBarcodeNumber): + self.USPSBarcodeNumber = USPSBarcodeNumber + def get_FedExBarcodeNumber(self): + return self.FedExBarcodeNumber + def set_FedExBarcodeNumber(self, FedExBarcodeNumber): + self.FedExBarcodeNumber = FedExBarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_LabelImagePage2(self): + return self.LabelImagePage2 + def set_LabelImagePage2(self, LabelImagePage2): + self.LabelImagePage2 = LabelImagePage2 + def get_LabelImagePage3(self): + return self.LabelImagePage3 + def set_LabelImagePage3(self, LabelImagePage3): + self.LabelImagePage3 = LabelImagePage3 + def get_LabelImagePage4(self): + return self.LabelImagePage4 + def set_LabelImagePage4(self, LabelImagePage4): + self.LabelImagePage4 = LabelImagePage4 + def get_CIImage(self): + return self.CIImage + def set_CIImage(self, CIImage): + self.CIImage = CIImage + def get_CIImagePage2(self): + return self.CIImagePage2 + def set_CIImagePage2(self, CIImagePage2): + self.CIImagePage2 = CIImagePage2 + def get_CIImagePage3(self): + return self.CIImagePage3 + def set_CIImagePage3(self, CIImagePage3): + self.CIImagePage3 = CIImagePage3 + def get_CIImagePage4(self): + return self.CIImagePage4 + def set_CIImagePage4(self, CIImagePage4): + self.CIImagePage4 = CIImagePage4 + def get_InsuranceFee(self): + return self.InsuranceFee + def set_InsuranceFee(self, InsuranceFee): + self.InsuranceFee = InsuranceFee + def get_DimensionalWeight(self): + return self.DimensionalWeight + def set_DimensionalWeight(self, DimensionalWeight): + self.DimensionalWeight = DimensionalWeight + def get_LogMessage(self): + return self.LogMessage + def set_LogMessage(self, LogMessage): + self.LogMessage = LogMessage + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def has__content(self): + if ( + self.Postage is not None or + self.CommodityGuarantee is not None or + self.Insurance is not None or + self.USPSBarcodeNumber is not None or + self.FedExBarcodeNumber is not None or + self.LabelImage is not None or + self.LabelImagePage2 is not None or + self.LabelImagePage3 is not None or + self.LabelImagePage4 is not None or + self.CIImage is not None or + self.CIImagePage2 is not None or + self.CIImagePage3 is not None or + self.CIImagePage4 is not None or + self.InsuranceFee is not None or + self.DimensionalWeight is not None or + self.LogMessage is not None or + self.RemainingBarcodes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSGXGGetLabelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSGXGGetLabelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSGXGGetLabelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSGXGGetLabelResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSGXGGetLabelResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.CommodityGuarantee is not None: + namespaceprefix_ = self.CommodityGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.CommodityGuarantee_nsprefix_) else '' + self.CommodityGuarantee.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CommodityGuarantee', pretty_print=pretty_print) + if self.Insurance is not None: + namespaceprefix_ = self.Insurance_nsprefix_ + ':' if (UseCapturedNS_ and self.Insurance_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsurance>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Insurance), input_name='Insurance')), namespaceprefix_ , eol_)) + if self.USPSBarcodeNumber is not None: + namespaceprefix_ = self.USPSBarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.USPSBarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUSPSBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.USPSBarcodeNumber), input_name='USPSBarcodeNumber')), namespaceprefix_ , eol_)) + if self.FedExBarcodeNumber is not None: + namespaceprefix_ = self.FedExBarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.FedExBarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFedExBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FedExBarcodeNumber), input_name='FedExBarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.LabelImagePage2 is not None: + namespaceprefix_ = self.LabelImagePage2_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImagePage2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImagePage2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImagePage2), input_name='LabelImagePage2')), namespaceprefix_ , eol_)) + if self.LabelImagePage3 is not None: + 
namespaceprefix_ = self.LabelImagePage3_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImagePage3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImagePage3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImagePage3), input_name='LabelImagePage3')), namespaceprefix_ , eol_)) + if self.LabelImagePage4 is not None: + namespaceprefix_ = self.LabelImagePage4_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImagePage4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImagePage4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImagePage4), input_name='LabelImagePage4')), namespaceprefix_ , eol_)) + if self.CIImage is not None: + namespaceprefix_ = self.CIImage_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImage), input_name='CIImage')), namespaceprefix_ , eol_)) + if self.CIImagePage2 is not None: + namespaceprefix_ = self.CIImagePage2_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImagePage2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImagePage2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImagePage2), input_name='CIImagePage2')), namespaceprefix_ , eol_)) + if self.CIImagePage3 is not None: + namespaceprefix_ = self.CIImagePage3_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImagePage3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImagePage3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImagePage3), input_name='CIImagePage3')), namespaceprefix_ , eol_)) + if self.CIImagePage4 is not None: + namespaceprefix_ = self.CIImagePage4_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImagePage4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImagePage4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImagePage4), input_name='CIImagePage4')), namespaceprefix_ , eol_)) + if self.InsuranceFee is not None: + namespaceprefix_ = self.InsuranceFee_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceFee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuranceFee>%s%s' % (namespaceprefix_ , self.gds_format_float(self.InsuranceFee, input_name='InsuranceFee'), namespaceprefix_ , eol_)) + if self.DimensionalWeight is not None: + namespaceprefix_ = self.DimensionalWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.DimensionalWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDimensionalWeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.DimensionalWeight, input_name='DimensionalWeight'), namespaceprefix_ , eol_)) + if self.LogMessage is not None: + namespaceprefix_ = self.LogMessage_nsprefix_ + ':' if (UseCapturedNS_ and self.LogMessage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogMessage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogMessage), input_name='LogMessage')), namespaceprefix_ , eol_)) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.RemainingBarcodes, input_name='RemainingBarcodes'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'CommodityGuarantee': + obj_ = CommodityGuaranteeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.CommodityGuarantee = obj_ + obj_.original_tagname_ = 'CommodityGuarantee' + elif nodeName_ == 'Insurance': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Insurance') + value_ = self.gds_validate_string(value_, node, 'Insurance') + self.Insurance = value_ + self.Insurance_nsprefix_ = child_.prefix + elif nodeName_ == 'USPSBarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'USPSBarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'USPSBarcodeNumber') + self.USPSBarcodeNumber = value_ + self.USPSBarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'FedExBarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FedExBarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'FedExBarcodeNumber') + self.FedExBarcodeNumber = value_ + self.FedExBarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImagePage2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImagePage2') + value_ = self.gds_validate_string(value_, node, 'LabelImagePage2') + self.LabelImagePage2 = value_ + self.LabelImagePage2_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImagePage3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImagePage3') + value_ = self.gds_validate_string(value_, node, 'LabelImagePage3') + self.LabelImagePage3 = value_ + self.LabelImagePage3_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImagePage4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImagePage4') + value_ = self.gds_validate_string(value_, node, 'LabelImagePage4') + self.LabelImagePage4 = value_ + self.LabelImagePage4_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImage') + value_ = self.gds_validate_string(value_, node, 'CIImage') + self.CIImage = value_ + self.CIImage_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImagePage2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImagePage2') + value_ = 
self.gds_validate_string(value_, node, 'CIImagePage2') + self.CIImagePage2 = value_ + self.CIImagePage2_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImagePage3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImagePage3') + value_ = self.gds_validate_string(value_, node, 'CIImagePage3') + self.CIImagePage3 = value_ + self.CIImagePage3_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImagePage4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImagePage4') + value_ = self.gds_validate_string(value_, node, 'CIImagePage4') + self.CIImagePage4 = value_ + self.CIImagePage4_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceFee' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'InsuranceFee') + fval_ = self.gds_validate_float(fval_, node, 'InsuranceFee') + self.InsuranceFee = fval_ + self.InsuranceFee_nsprefix_ = child_.prefix + elif nodeName_ == 'DimensionalWeight' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'DimensionalWeight') + fval_ = self.gds_validate_float(fval_, node, 'DimensionalWeight') + self.DimensionalWeight = fval_ + self.DimensionalWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'LogMessage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogMessage') + value_ = self.gds_validate_string(value_, node, 'LogMessage') + self.LogMessage = value_ + self.LogMessage_nsprefix_ = child_.prefix + elif nodeName_ == 'RemainingBarcodes' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'RemainingBarcodes') + ival_ = self.gds_validate_integer(ival_, node, 'RemainingBarcodes') + self.RemainingBarcodes = ival_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSGXGGetLabelResponse + + +class CommodityGuaranteeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CommodityType=None, GuaranteeDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CommodityType = CommodityType + self.CommodityType_nsprefix_ = None + self.GuaranteeDate = GuaranteeDate + self.GuaranteeDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommodityGuaranteeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CommodityGuaranteeType.subclass: + return CommodityGuaranteeType.subclass(*args_, **kwargs_) + else: + return CommodityGuaranteeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_CommodityType(self): + return self.CommodityType + def set_CommodityType(self, CommodityType): + self.CommodityType = CommodityType + def get_GuaranteeDate(self): + return self.GuaranteeDate + def set_GuaranteeDate(self, GuaranteeDate): + self.GuaranteeDate = GuaranteeDate + def has__content(self): + if ( + self.CommodityType is not None or + self.GuaranteeDate is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommodityGuaranteeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommodityGuaranteeType') + if 
imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'CommodityGuaranteeType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommodityGuaranteeType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommodityGuaranteeType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommodityGuaranteeType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommodityGuaranteeType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.CommodityType is not None:
+            namespaceprefix_ = self.CommodityType_nsprefix_ + ':' if (UseCapturedNS_ and self.CommodityType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCommodityType>%s</%sCommodityType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommodityType), input_name='CommodityType')), namespaceprefix_ , eol_))
+        if self.GuaranteeDate is not None:
+            namespaceprefix_ = self.GuaranteeDate_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteeDate_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sGuaranteeDate>%s</%sGuaranteeDate>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteeDate), input_name='GuaranteeDate')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'CommodityType':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'CommodityType')
+            value_ = self.gds_validate_string(value_, node, 'CommodityType')
+            self.CommodityType = value_
+            self.CommodityType_nsprefix_ = child_.prefix
+        elif nodeName_ == 'GuaranteeDate':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'GuaranteeDate')
+            value_ = self.gds_validate_string(value_, node, 'GuaranteeDate')
+            self.GuaranteeDate = value_
+            self.GuaranteeDate_nsprefix_ = child_.prefix
+# end class CommodityGuaranteeType
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+    '''
+    nsmap = {
+        prefix: uri
+        for node in rootNode.iter()
+        for (prefix, uri) in node.nsmap.items()
+        if prefix is not None
+    }
+    namespacedefs = ' '.join([
+        'xmlns:{}="{}"'.format(prefix, uri)
+        for prefix, uri in nsmap.items()
+    ])
+    return nsmap, namespacedefs
+
+
+def parse(inFileName, silence=False, print_warnings=True):
+    global CapturedNsmap_
+    gds_collector = GdsCollector_()
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'eVSGXGGetLabelResponse'
+        rootClass = eVSGXGGetLabelResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode)
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(
+            sys.stdout, 0, name_=rootTag,
+            namespacedef_=namespacedefs,
+            pretty_print=True)
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def parseEtree(inFileName, silence=False, print_warnings=True,
+               mapping=None, reverse_mapping=None, nsmap=None):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    gds_collector = GdsCollector_()
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'eVSGXGGetLabelResponse'
+        rootClass = eVSGXGGetLabelResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    if mapping is None:
+        mapping = {}
+    if reverse_mapping is None:
+        reverse_mapping = {}
+    rootElement = rootObj.to_etree(
+        None, name_=rootTag, mapping_=mapping,
+        reverse_mapping_=reverse_mapping, nsmap_=nsmap)
+    reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping)
+    # Enable Python to collect the space used by the DOM.
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        content = etree_.tostring(
+            rootElement, pretty_print=True,
+            xml_declaration=True, encoding="utf-8")
+        sys.stdout.write(str(content))
+        sys.stdout.write('\n')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj, rootElement, mapping, reverse_node_mapping
+
+
+def parseString(inString, silence=False, print_warnings=True):
+    '''Parse a string, create the object tree, and export it.
+
+    Arguments:
+    - inString -- A string.  This XML fragment should not start
+      with an XML declaration containing an encoding.
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelResponse' + rootClass = eVSGXGGetLabelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelResponse' + rootClass = eVSGXGGetLabelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_gxg_get_label_response import *\n\n') + sys.stdout.write('import evs_gxg_get_label_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CommodityGuaranteeType", + "eVSGXGGetLabelResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_request.py b/modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_request.py new file mode 100644 index 0000000000..dc956a46b3 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_request.py @@ -0,0 +1,3446 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:49 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_priority_mail_intl_request.py') +# +# Command line arguments: +# ./schemas/eVSPriorityMailIntlRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_priority_mail_intl_request.py" ./schemas/eVSPriorityMailIntlRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSPriorityMailIntlRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, FromCustomsReference=None, ToName=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToCity=None, ToProvince=None, ToCountry=None, ToPostalCode=None, ToPOBoxFlag=None, ToPhone=None, ToFax=None, ToEmail=None, ImportersReferenceNumber=None, NonDeliveryOption=None, RedirectName=None, RedirectEmail=None, RedirectSMS=None, RedirectAddress=None, RedirectCity=None, RedirectState=None, RedirectZipCode=None, RedirectZip4=None, Container=None, ShippingContents=None, Insured=None, InsuredNumber=None, InsuredAmount=None, GrossPounds=None, GrossOunces=None, ContentType=None, ContentTypeOther=None, Agreement=None, Comments=None, LicenseNumber=None, CertificateNumber=None, InvoiceNumber=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, POZipCode=None, LabelDate=None, EMCAAccount=None, HoldForManifest=None, EELPFC=None, PriceOptions=None, Width=None, Length=None, Height=None, Girth=None, ExtraServices=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, ImportersReferenceType=None, ImportersTelephoneNumber=None, ImportersFaxNumber=None, ImportersEmail=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, SenderEmail=None, ChargebackCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + 
self.FromMiddleInitial = FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.FromCustomsReference = FromCustomsReference + self.FromCustomsReference_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToCountry = ToCountry + self.ToCountry_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPOBoxFlag = ToPOBoxFlag + self.ToPOBoxFlag_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.ToFax = ToFax + self.ToFax_nsprefix_ = None + self.ToEmail = ToEmail + self.ToEmail_nsprefix_ = None + self.ImportersReferenceNumber = ImportersReferenceNumber + self.ImportersReferenceNumber_nsprefix_ = None + self.NonDeliveryOption = NonDeliveryOption + self.NonDeliveryOption_nsprefix_ = None + self.RedirectName = RedirectName + self.RedirectName_nsprefix_ = None + self.RedirectEmail = RedirectEmail + self.RedirectEmail_nsprefix_ = None + self.RedirectSMS = RedirectSMS + self.RedirectSMS_nsprefix_ = None + self.RedirectAddress = RedirectAddress + self.RedirectAddress_nsprefix_ = None + self.RedirectCity = RedirectCity + self.RedirectCity_nsprefix_ = None + self.RedirectState = RedirectState + self.RedirectState_nsprefix_ = None + self.RedirectZipCode = RedirectZipCode + self.RedirectZipCode_nsprefix_ = None + self.RedirectZip4 = RedirectZip4 + self.RedirectZip4_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.Insured = Insured + self.Insured_nsprefix_ = None + self.InsuredNumber = InsuredNumber + self.InsuredNumber_nsprefix_ = None + self.InsuredAmount = InsuredAmount + self.InsuredAmount_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ContentTypeOther = ContentTypeOther + self.ContentTypeOther_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Comments = Comments + self.Comments_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + 
self.InvoiceNumber_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.LabelDate = LabelDate + self.LabelDate_nsprefix_ = None + self.EMCAAccount = EMCAAccount + self.EMCAAccount_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.EELPFC = EELPFC + self.EELPFC_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.ImportersReferenceType = ImportersReferenceType + self.ImportersReferenceType_nsprefix_ = None + self.ImportersTelephoneNumber = ImportersTelephoneNumber + self.ImportersTelephoneNumber_nsprefix_ = None + self.ImportersFaxNumber = ImportersFaxNumber + self.ImportersFaxNumber_nsprefix_ = None + self.ImportersEmail = ImportersEmail + self.ImportersEmail_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.SenderEmail = SenderEmail + self.SenderEmail_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSPriorityMailIntlRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSPriorityMailIntlRequest.subclass: + return eVSPriorityMailIntlRequest.subclass(*args_, **kwargs_) + else: + return eVSPriorityMailIntlRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def 
set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_FromCustomsReference(self): + return self.FromCustomsReference + def set_FromCustomsReference(self, FromCustomsReference): + self.FromCustomsReference = FromCustomsReference + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToCountry(self): + return self.ToCountry + def set_ToCountry(self, ToCountry): + self.ToCountry = ToCountry + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPOBoxFlag(self): + return self.ToPOBoxFlag + def 
set_ToPOBoxFlag(self, ToPOBoxFlag): + self.ToPOBoxFlag = ToPOBoxFlag + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_ToFax(self): + return self.ToFax + def set_ToFax(self, ToFax): + self.ToFax = ToFax + def get_ToEmail(self): + return self.ToEmail + def set_ToEmail(self, ToEmail): + self.ToEmail = ToEmail + def get_ImportersReferenceNumber(self): + return self.ImportersReferenceNumber + def set_ImportersReferenceNumber(self, ImportersReferenceNumber): + self.ImportersReferenceNumber = ImportersReferenceNumber + def get_NonDeliveryOption(self): + return self.NonDeliveryOption + def set_NonDeliveryOption(self, NonDeliveryOption): + self.NonDeliveryOption = NonDeliveryOption + def get_RedirectName(self): + return self.RedirectName + def set_RedirectName(self, RedirectName): + self.RedirectName = RedirectName + def get_RedirectEmail(self): + return self.RedirectEmail + def set_RedirectEmail(self, RedirectEmail): + self.RedirectEmail = RedirectEmail + def get_RedirectSMS(self): + return self.RedirectSMS + def set_RedirectSMS(self, RedirectSMS): + self.RedirectSMS = RedirectSMS + def get_RedirectAddress(self): + return self.RedirectAddress + def set_RedirectAddress(self, RedirectAddress): + self.RedirectAddress = RedirectAddress + def get_RedirectCity(self): + return self.RedirectCity + def set_RedirectCity(self, RedirectCity): + self.RedirectCity = RedirectCity + def get_RedirectState(self): + return self.RedirectState + def set_RedirectState(self, RedirectState): + self.RedirectState = RedirectState + def get_RedirectZipCode(self): + return self.RedirectZipCode + def set_RedirectZipCode(self, RedirectZipCode): + self.RedirectZipCode = RedirectZipCode + def get_RedirectZip4(self): + return self.RedirectZip4 + def set_RedirectZip4(self, RedirectZip4): + self.RedirectZip4 = RedirectZip4 + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_Insured(self): + return self.Insured + def set_Insured(self, Insured): + self.Insured = Insured + def get_InsuredNumber(self): + return self.InsuredNumber + def set_InsuredNumber(self, InsuredNumber): + self.InsuredNumber = InsuredNumber + def get_InsuredAmount(self): + return self.InsuredAmount + def set_InsuredAmount(self, InsuredAmount): + self.InsuredAmount = InsuredAmount + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentTypeOther(self): + return self.ContentTypeOther + def set_ContentTypeOther(self, ContentTypeOther): + self.ContentTypeOther = ContentTypeOther + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + self.Agreement = Agreement + def get_Comments(self): + return self.Comments + def set_Comments(self, Comments): + self.Comments = Comments + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def 
set_CertificateNumber(self, CertificateNumber): + self.CertificateNumber = CertificateNumber + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_LabelDate(self): + return self.LabelDate + def set_LabelDate(self, LabelDate): + self.LabelDate = LabelDate + def get_EMCAAccount(self): + return self.EMCAAccount + def set_EMCAAccount(self, EMCAAccount): + self.EMCAAccount = EMCAAccount + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_EELPFC(self): + return self.EELPFC + def set_EELPFC(self, EELPFC): + self.EELPFC = EELPFC + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_ImportersReferenceType(self): + return self.ImportersReferenceType + def set_ImportersReferenceType(self, ImportersReferenceType): + self.ImportersReferenceType = ImportersReferenceType + def get_ImportersTelephoneNumber(self): + return self.ImportersTelephoneNumber + def set_ImportersTelephoneNumber(self, ImportersTelephoneNumber): + self.ImportersTelephoneNumber = ImportersTelephoneNumber + def get_ImportersFaxNumber(self): + return self.ImportersFaxNumber + def set_ImportersFaxNumber(self, ImportersFaxNumber): + self.ImportersFaxNumber = ImportersFaxNumber + def get_ImportersEmail(self): + return self.ImportersEmail + def set_ImportersEmail(self, ImportersEmail): + self.ImportersEmail = ImportersEmail + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def 
set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_SenderEmail(self): + return self.SenderEmail + def set_SenderEmail(self, SenderEmail): + self.SenderEmail = SenderEmail + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.FromCustomsReference is not None or + self.ToName is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToCity is not None or + self.ToProvince is not None or + self.ToCountry is not None or + self.ToPostalCode 
is not None or + self.ToPOBoxFlag is not None or + self.ToPhone is not None or + self.ToFax is not None or + self.ToEmail is not None or + self.ImportersReferenceNumber is not None or + self.NonDeliveryOption is not None or + self.RedirectName is not None or + self.RedirectEmail is not None or + self.RedirectSMS is not None or + self.RedirectAddress is not None or + self.RedirectCity is not None or + self.RedirectState is not None or + self.RedirectZipCode is not None or + self.RedirectZip4 is not None or + self.Container is not None or + self.ShippingContents is not None or + self.Insured is not None or + self.InsuredNumber is not None or + self.InsuredAmount is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.ContentType is not None or + self.ContentTypeOther is not None or + self.Agreement is not None or + self.Comments is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.InvoiceNumber is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.POZipCode is not None or + self.LabelDate is not None or + self.EMCAAccount is not None or + self.HoldForManifest is not None or + self.EELPFC is not None or + self.PriceOptions is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.ExtraServices is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.ImportersReferenceType is not None or + self.ImportersTelephoneNumber is not None or + self.ImportersFaxNumber is not None or + self.ImportersEmail is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.SenderEmail is not None or + self.ChargebackCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSPriorityMailIntlRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSPriorityMailIntlRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSPriorityMailIntlRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSPriorityMailIntlRequest', pretty_print=pretty_print) + showIndent(outfile, level, 
pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSPriorityMailIntlRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Option is not None:
+            namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sOption>%s</%sOption>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_))
+        if self.Revision is not None:
+            namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRevision>%s</%sRevision>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_))
+        if self.ImageParameters is not None:
+            namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else ''
+            self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print)
+        if self.FromFirstName is not None:
+            namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromFirstName>%s</%sFromFirstName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_))
+        if self.FromMiddleInitial is not None:
+            namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromMiddleInitial>%s</%sFromMiddleInitial>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_))
+        if self.FromLastName is not None:
+            namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromLastName>%s</%sFromLastName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_))
+        if self.FromFirm is not None:
+            namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromFirm>%s</%sFromFirm>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_))
+        if self.FromAddress1 is not None:
+            namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and
self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if self.FromCustomsReference is not None: + namespaceprefix_ = self.FromCustomsReference_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCustomsReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCustomsReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCustomsReference), input_name='FromCustomsReference')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), 
input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirstName is not None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToCountry is not None: + namespaceprefix_ = self.ToCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCountry), input_name='ToCountry')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPOBoxFlag is not None: + namespaceprefix_ = self.ToPOBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPOBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPOBoxFlag), input_name='ToPOBoxFlag')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.ToFax is not None: + namespaceprefix_ = self.ToFax_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFax_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFax>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFax), input_name='ToFax')), namespaceprefix_ , eol_)) + if self.ToEmail is not None: + namespaceprefix_ = self.ToEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToEmail), input_name='ToEmail')), namespaceprefix_ , eol_)) + if self.ImportersReferenceNumber is not None: + namespaceprefix_ = self.ImportersReferenceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceNumber), input_name='ImportersReferenceNumber')), namespaceprefix_ , eol_)) + if self.NonDeliveryOption is not None: + namespaceprefix_ = self.NonDeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonDeliveryOption), input_name='NonDeliveryOption')), namespaceprefix_ , eol_)) + if self.RedirectName is not None: + namespaceprefix_ = self.RedirectName_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectName), input_name='RedirectName')), namespaceprefix_ , eol_)) + if self.RedirectEmail is not None: + namespaceprefix_ = self.RedirectEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectEmail), input_name='RedirectEmail')), namespaceprefix_ , eol_)) + if self.RedirectSMS is not None: + namespaceprefix_ = self.RedirectSMS_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectSMS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectSMS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectSMS), input_name='RedirectSMS')), namespaceprefix_ , eol_)) + if 
self.RedirectAddress is not None: + namespaceprefix_ = self.RedirectAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectAddress), input_name='RedirectAddress')), namespaceprefix_ , eol_)) + if self.RedirectCity is not None: + namespaceprefix_ = self.RedirectCity_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectCity), input_name='RedirectCity')), namespaceprefix_ , eol_)) + if self.RedirectState is not None: + namespaceprefix_ = self.RedirectState_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectState), input_name='RedirectState')), namespaceprefix_ , eol_)) + if self.RedirectZipCode is not None: + namespaceprefix_ = self.RedirectZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZipCode), input_name='RedirectZipCode')), namespaceprefix_ , eol_)) + if self.RedirectZip4 is not None: + namespaceprefix_ = self.RedirectZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZip4), input_name='RedirectZip4')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.Insured is not None: + namespaceprefix_ = self.Insured_nsprefix_ + ':' if (UseCapturedNS_ and self.Insured_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsured>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Insured, input_name='Insured'), namespaceprefix_ , eol_)) + if self.InsuredNumber is not None: + namespaceprefix_ = self.InsuredNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredNumber), input_name='InsuredNumber')), namespaceprefix_ , eol_)) + if self.InsuredAmount is not None: + namespaceprefix_ = self.InsuredAmount_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredAmount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredAmount>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredAmount), input_name='InsuredAmount')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentTypeOther is not None: + namespaceprefix_ = self.ContentTypeOther_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentTypeOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentTypeOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentTypeOther), input_name='ContentTypeOther')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Comments is not None: + namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + 
namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.LabelDate is not None: + namespaceprefix_ = self.LabelDate_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelDate), input_name='LabelDate')), namespaceprefix_ , eol_)) + if self.EMCAAccount is not None: + namespaceprefix_ = self.EMCAAccount_nsprefix_ + ':' if (UseCapturedNS_ and self.EMCAAccount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEMCAAccount>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EMCAAccount), input_name='EMCAAccount')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.EELPFC is not None: + namespaceprefix_ = self.EELPFC_nsprefix_ + ':' if (UseCapturedNS_ and self.EELPFC_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEELPFC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EELPFC), input_name='EELPFC')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Width), input_name='Width')), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Length), input_name='Length')), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Height), input_name='Height')), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Girth), input_name='Girth')), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.ImportersReferenceType is not None: + namespaceprefix_ = self.ImportersReferenceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceType>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceType), input_name='ImportersReferenceType')), namespaceprefix_ , eol_)) + if self.ImportersTelephoneNumber is not None: + namespaceprefix_ = self.ImportersTelephoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersTelephoneNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersTelephoneNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersTelephoneNumber), input_name='ImportersTelephoneNumber')), namespaceprefix_ , eol_)) + if self.ImportersFaxNumber is not None: + namespaceprefix_ = self.ImportersFaxNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersFaxNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersFaxNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersFaxNumber), input_name='ImportersFaxNumber')), namespaceprefix_ , eol_)) + if self.ImportersEmail is not None: + namespaceprefix_ = self.ImportersEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersEmail), input_name='ImportersEmail')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if self.SenderFirstName is not None: + namespaceprefix_ = self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.SenderEmail is not None: + namespaceprefix_ = self.SenderEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEmail), input_name='SenderEmail')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + 
value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName = value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCustomsReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCustomsReference') + value_ = self.gds_validate_string(value_, node, 'FromCustomsReference') + self.FromCustomsReference = value_ + self.FromCustomsReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 
'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCountry') + value_ = self.gds_validate_string(value_, node, 'ToCountry') + self.ToCountry = value_ + self.ToCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPOBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPOBoxFlag') + value_ = self.gds_validate_string(value_, node, 'ToPOBoxFlag') + self.ToPOBoxFlag = value_ + self.ToPOBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFax': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFax') + value_ = self.gds_validate_string(value_, node, 'ToFax') + self.ToFax = value_ + self.ToFax_nsprefix_ = child_.prefix + elif nodeName_ == 'ToEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToEmail') + value_ = self.gds_validate_string(value_, node, 'ToEmail') + self.ToEmail = value_ + self.ToEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReferenceNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceNumber') + self.ImportersReferenceNumber = value_ + self.ImportersReferenceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonDeliveryOption') + value_ = self.gds_validate_string(value_, node, 'NonDeliveryOption') + self.NonDeliveryOption = value_ + self.NonDeliveryOption_nsprefix_ = child_.prefix + elif 
nodeName_ == 'RedirectName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectName') + value_ = self.gds_validate_string(value_, node, 'RedirectName') + self.RedirectName = value_ + self.RedirectName_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectEmail') + value_ = self.gds_validate_string(value_, node, 'RedirectEmail') + self.RedirectEmail = value_ + self.RedirectEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectSMS': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectSMS') + value_ = self.gds_validate_string(value_, node, 'RedirectSMS') + self.RedirectSMS = value_ + self.RedirectSMS_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectAddress') + value_ = self.gds_validate_string(value_, node, 'RedirectAddress') + self.RedirectAddress = value_ + self.RedirectAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectCity') + value_ = self.gds_validate_string(value_, node, 'RedirectCity') + self.RedirectCity = value_ + self.RedirectCity_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectState') + value_ = self.gds_validate_string(value_, node, 'RedirectState') + self.RedirectState = value_ + self.RedirectState_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZipCode') + value_ = self.gds_validate_string(value_, node, 'RedirectZipCode') + self.RedirectZipCode = value_ + self.RedirectZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZip4') + value_ = self.gds_validate_string(value_, node, 'RedirectZip4') + self.RedirectZip4 = value_ + self.RedirectZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'Insured': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Insured') + ival_ = self.gds_validate_boolean(ival_, node, 'Insured') + self.Insured = ival_ + self.Insured_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredNumber') + value_ = self.gds_validate_string(value_, node, 'InsuredNumber') + self.InsuredNumber = value_ + self.InsuredNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredAmount': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredAmount') + value_ = self.gds_validate_string(value_, node, 'InsuredAmount') + self.InsuredAmount = value_ + self.InsuredAmount_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'GrossPounds') + ival_ = self.gds_validate_integer(ival_, 
node, 'GrossPounds') + self.GrossPounds = ival_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossOunces' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'GrossOunces') + ival_ = self.gds_validate_integer(ival_, node, 'GrossOunces') + self.GrossOunces = ival_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentTypeOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentTypeOther') + value_ = self.gds_validate_string(value_, node, 'ContentTypeOther') + self.ContentTypeOther = value_ + self.ContentTypeOther_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Comments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Comments') + value_ = self.gds_validate_string(value_, node, 'Comments') + self.Comments = value_ + self.Comments_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelDate': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelDate') + value_ = self.gds_validate_string(value_, node, 'LabelDate') + self.LabelDate = value_ + self.LabelDate_nsprefix_ = child_.prefix + elif nodeName_ == 'EMCAAccount': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EMCAAccount') + value_ = self.gds_validate_string(value_, node, 'EMCAAccount') + self.EMCAAccount = value_ + self.EMCAAccount_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'EELPFC': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EELPFC') + value_ = self.gds_validate_string(value_, node, 'EELPFC') + self.EELPFC = value_ + self.EELPFC_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'Width': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Width') + value_ = self.gds_validate_string(value_, node, 'Width') + self.Width = value_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Length') + value_ = self.gds_validate_string(value_, node, 'Length') + self.Length = value_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Height') + value_ = self.gds_validate_string(value_, node, 'Height') + self.Height = value_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Girth') + value_ = self.gds_validate_string(value_, node, 'Girth') + self.Girth = value_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'ImportersReferenceType') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceType') + self.ImportersReferenceType = value_ + self.ImportersReferenceType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersTelephoneNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersTelephoneNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersTelephoneNumber') + self.ImportersTelephoneNumber = value_ + self.ImportersTelephoneNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersFaxNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersFaxNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersFaxNumber') + self.ImportersFaxNumber = value_ + self.ImportersFaxNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersEmail') + value_ = self.gds_validate_string(value_, node, 'ImportersEmail') + self.ImportersEmail = value_ + self.ImportersEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + 
self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEmail') + value_ = self.gds_validate_string(value_, node, 'SenderEmail') + self.SenderEmail = value_ + self.SenderEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'ChargebackCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ChargebackCode') + value_ = self.gds_validate_string(value_, node, 'ChargebackCode') + self.ChargebackCode = value_ + self.ChargebackCode_nsprefix_ = child_.prefix +# end class eVSPriorityMailIntlRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def 
set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def has__content(self): + if ( + self.ImageParameter is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = 
getSubclassFromModule_( + CurrentSubclassModule_, ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + 
subclass = None + superclass = None + def __init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Value') + fval_ = self.gds_validate_float(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetPounds') + fval_ = self.gds_validate_float(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetOunces') + fval_ = self.gds_validate_float(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % (namespaceprefix_ , self.gds_format_integer(ExtraService_, input_name='ExtraService'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ExtraService') + ival_ = self.gds_validate_integer(ival_, node, 'ExtraService') + self.ExtraService.append(ival_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_priority_mail_intl_request import *\n\n') + sys.stdout.write('import evs_priority_mail_intl_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServicesType", + "ImageParametersType", + "ItemDetailType", + "ShippingContentsType", + "eVSPriorityMailIntlRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_response.py b/modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_response.py new file mode 100644 index 0000000000..fe822d9ca0 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_priority_mail_intl_response.py @@ -0,0 +1,1831 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:50 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_priority_mail_intl_response.py') +# +# Command line arguments: +# ./schemas/eVSPriorityMailIntlResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_priority_mail_intl_response.py" ./schemas/eVSPriorityMailIntlResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
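+# Illustrative sketch (not part of the generateDS template): the quoting
+# helpers defined just below behave roughly as follows, assuming the usual
+# XML entity escaping in quote_xml_aux / quote_attrib:
+#
+#     quote_xml('a < b & c')        ->  a &lt; b &amp; c
+#     quote_xml('<![CDATA[x<1]]>')  ->  <![CDATA[x<1]]>   (CDATA preserved)
+#     quote_attrib('5" x 3\'')      ->  "5&quot; x 3'"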
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+            optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSPriorityMailIntlResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, TotalValue=None, SDRValue=None, BarcodeNumber=None, LabelImage=None, Page2Image=None, Page3Image=None, Page4Image=None, Page5Image=None, Page6Image=None, Prohibitions=None, Restrictions=None, Observations=None, Regulations=None, AdditionalRestrictions=None, ParcelIndemnityCoverage=None, ExtraServices=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.TotalValue = TotalValue + self.TotalValue_nsprefix_ = None + self.SDRValue = SDRValue + self.SDRValue_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.Page2Image = Page2Image + self.Page2Image_nsprefix_ = None + self.Page3Image = Page3Image + self.Page3Image_nsprefix_ = None + self.Page4Image = Page4Image + self.Page4Image_nsprefix_ = None + self.Page5Image = Page5Image + self.Page5Image_nsprefix_ = None + self.Page6Image = Page6Image + self.Page6Image_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.Regulations = Regulations + self.Regulations_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + self.ParcelIndemnityCoverage_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSPriorityMailIntlResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSPriorityMailIntlResponse.subclass: + return eVSPriorityMailIntlResponse.subclass(*args_, **kwargs_) + else: + return eVSPriorityMailIntlResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def 
set_Postage(self, Postage): + self.Postage = Postage + def get_TotalValue(self): + return self.TotalValue + def set_TotalValue(self, TotalValue): + self.TotalValue = TotalValue + def get_SDRValue(self): + return self.SDRValue + def set_SDRValue(self, SDRValue): + self.SDRValue = SDRValue + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_Page2Image(self): + return self.Page2Image + def set_Page2Image(self, Page2Image): + self.Page2Image = Page2Image + def get_Page3Image(self): + return self.Page3Image + def set_Page3Image(self, Page3Image): + self.Page3Image = Page3Image + def get_Page4Image(self): + return self.Page4Image + def set_Page4Image(self, Page4Image): + self.Page4Image = Page4Image + def get_Page5Image(self): + return self.Page5Image + def set_Page5Image(self, Page5Image): + self.Page5Image = Page5Image + def get_Page6Image(self): + return self.Page6Image + def set_Page6Image(self, Page6Image): + self.Page6Image = Page6Image + def get_Prohibitions(self): + return self.Prohibitions + def set_Prohibitions(self, Prohibitions): + self.Prohibitions = Prohibitions + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def get_Observations(self): + return self.Observations + def set_Observations(self, Observations): + self.Observations = Observations + def get_Regulations(self): + return self.Regulations + def set_Regulations(self, Regulations): + self.Regulations = Regulations + def get_AdditionalRestrictions(self): + return self.AdditionalRestrictions + def set_AdditionalRestrictions(self, AdditionalRestrictions): + self.AdditionalRestrictions = AdditionalRestrictions + def get_ParcelIndemnityCoverage(self): + return self.ParcelIndemnityCoverage + def set_ParcelIndemnityCoverage(self, ParcelIndemnityCoverage): + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def has__content(self): + if ( + self.Postage is not None or + self.TotalValue is not None or + self.SDRValue is not None or + self.BarcodeNumber is not None or + self.LabelImage is not None or + self.Page2Image is not None or + self.Page3Image is not None or + self.Page4Image is not None or + self.Page5Image is not None or + self.Page6Image is not None or + self.Prohibitions is not None or + self.Restrictions is not None or + self.Observations is not None or + self.Regulations is not None or + self.AdditionalRestrictions is not None or + self.ParcelIndemnityCoverage is not None or + self.ExtraServices is not None or + self.RemainingBarcodes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSPriorityMailIntlResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSPriorityMailIntlResponse': + name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSPriorityMailIntlResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSPriorityMailIntlResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSPriorityMailIntlResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.TotalValue is not None: + namespaceprefix_ = self.TotalValue_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotalValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.TotalValue, input_name='TotalValue'), namespaceprefix_ , eol_)) + if self.SDRValue is not None: + namespaceprefix_ = self.SDRValue_nsprefix_ + ':' if (UseCapturedNS_ and self.SDRValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSDRValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SDRValue), input_name='SDRValue')), namespaceprefix_ , eol_)) + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.Page2Image is not None: + namespaceprefix_ = self.Page2Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page2Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage2Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page2Image), input_name='Page2Image')), namespaceprefix_ , eol_)) + if self.Page3Image is not None: + namespaceprefix_ = self.Page3Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page3Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage3Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page3Image), input_name='Page3Image')), 
namespaceprefix_ , eol_)) + if self.Page4Image is not None: + namespaceprefix_ = self.Page4Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page4Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage4Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page4Image), input_name='Page4Image')), namespaceprefix_ , eol_)) + if self.Page5Image is not None: + namespaceprefix_ = self.Page5Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page5Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage5Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page5Image), input_name='Page5Image')), namespaceprefix_ , eol_)) + if self.Page6Image is not None: + namespaceprefix_ = self.Page6Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page6Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage6Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page6Image), input_name='Page6Image')), namespaceprefix_ , eol_)) + if self.Prohibitions is not None: + namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProhibitions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_)) + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + if self.Observations is not None: + namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sObservations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_)) + if self.Regulations is not None: + namespaceprefix_ = self.Regulations_nsprefix_ + ':' if (UseCapturedNS_ and self.Regulations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRegulations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Regulations), input_name='Regulations')), namespaceprefix_ , eol_)) + if self.AdditionalRestrictions is not None: + namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_)) + if self.ParcelIndemnityCoverage is not None: + namespaceprefix_ = self.ParcelIndemnityCoverage_nsprefix_ + ':' if (UseCapturedNS_ and self.ParcelIndemnityCoverage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sParcelIndemnityCoverage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ParcelIndemnityCoverage, input_name='ParcelIndemnityCoverage'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = 
self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.RemainingBarcodes, input_name='RemainingBarcodes'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'TotalValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'TotalValue') + fval_ = self.gds_validate_float(fval_, node, 'TotalValue') + self.TotalValue = fval_ + self.TotalValue_nsprefix_ = child_.prefix + elif nodeName_ == 'SDRValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SDRValue') + value_ = self.gds_validate_string(value_, node, 'SDRValue') + self.SDRValue = value_ + self.SDRValue_nsprefix_ = child_.prefix + elif nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'Page2Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page2Image') + value_ = self.gds_validate_string(value_, node, 'Page2Image') + self.Page2Image = value_ + self.Page2Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page3Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page3Image') + value_ = self.gds_validate_string(value_, node, 'Page3Image') + self.Page3Image = value_ + self.Page3Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page4Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page4Image') + value_ = self.gds_validate_string(value_, node, 'Page4Image') + self.Page4Image = value_ + self.Page4Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page5Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page5Image') + value_ = self.gds_validate_string(value_, node, 'Page5Image') + self.Page5Image = value_ + self.Page5Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page6Image': + value_ = child_.text + 
value_ = self.gds_parse_string(value_, node, 'Page6Image') + value_ = self.gds_validate_string(value_, node, 'Page6Image') + self.Page6Image = value_ + self.Page6Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Prohibitions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Prohibitions') + value_ = self.gds_validate_string(value_, node, 'Prohibitions') + self.Prohibitions = value_ + self.Prohibitions_nsprefix_ = child_.prefix + elif nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'Regulations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Regulations') + value_ = self.gds_validate_string(value_, node, 'Regulations') + self.Regulations = value_ + self.Regulations_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'ParcelIndemnityCoverage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'ParcelIndemnityCoverage') + fval_ = self.gds_validate_float(fval_, node, 'ParcelIndemnityCoverage') + self.ParcelIndemnityCoverage = fval_ + self.ParcelIndemnityCoverage_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'RemainingBarcodes' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'RemainingBarcodes') + ival_ = self.gds_validate_integer(ival_, node, 'RemainingBarcodes') + self.RemainingBarcodes = ival_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSPriorityMailIntlResponse + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def 
set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + obj_ = ExtraServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraService.append(obj_) + obj_.original_tagname_ = 'ExtraService' +# end class ExtraServicesType + + +class ExtraServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Price=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Price = Price + 
self.Price_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServiceType.subclass: + return ExtraServiceType.subclass(*args_, **kwargs_) + else: + return ExtraServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Price is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + 
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ServiceID') + ival_ = self.gds_validate_integer(ival_, node, 'ServiceID') + self.ServiceID = ival_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
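+
+    Example (an illustrative sketch, not part of the generated module;
+    assumes `xml_text` holds an eVSPriorityMailIntlResponse document
+    without an encoding declaration):
+
+        obj = parseString(xml_text, silence=True)
+        print(obj.get_BarcodeNumber(), obj.get_Postage())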
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_priority_mail_intl_response import *\n\n') + sys.stdout.write('import evs_priority_mail_intl_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServiceType", + "ExtraServicesType", + "eVSPriorityMailIntlResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_request.py b/modules/connectors/usps/karrio/schemas/usps/evs_request.py new file mode 100644 index 0000000000..ef01d0957b --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_request.py @@ -0,0 +1,4034 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:47 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_request.py') +# +# Command line arguments: +# ./schemas/eVSRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_request.py" ./schemas/eVSRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". 
See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values 
= input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
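+        # Illustrative example (not generated by generateDS): the outer
+        # list is AND-ed and each inner list is OR-ed.  With
+        # patterns = [['^\\d{5}$', '^\\d{5}-\\d{4}$']] the target '20260'
+        # fully matches the first alternative, so this returns True, while
+        # '2026' matches neither alternative over its full length, so this
+        # returns False.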
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
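+        # Illustrative note (not generated by generateDS): this serializes
+        # a wildcard (xs:any) node back to its XML text, so an lxml element
+        # parsed from a hypothetical '<Foo bar="1"/>' comes back as that
+        # same markup string via etree_.tostring below.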
+        content = ""
+        content = etree_.tostring(node, encoding="unicode")
+        return content
+    @classmethod
+    def gds_reverse_node_mapping(cls, mapping):
+        return dict(((v, k) for k, v in mapping.items()))
+    @staticmethod
+    def gds_encode(instring):
+        if sys.version_info.major == 2:
+            if ExternalEncoding:
+                encoding = ExternalEncoding
+            else:
+                encoding = 'utf-8'
+            return instring.encode(encoding)
+        else:
+            return instring
+    @staticmethod
+    def convert_unicode(instring):
+        if isinstance(instring, str):
+            result = quote_xml(instring)
+        elif sys.version_info.major == 2 and isinstance(instring, unicode):
+            result = quote_xml(instring).encode('utf8')
+        else:
+            result = GeneratedsSuper.gds_encode(str(instring))
+        return result
+    def __eq__(self, other):
+        def excl_select_objs_(obj):
+            return (obj[0] != 'parent_object_' and
+                    obj[0] != 'gds_collector_')
+        if type(self) != type(other):
+            return False
+        return all(x == y for x, y in zip_longest(
+            filter(excl_select_objs_, self.__dict__.items()),
+            filter(excl_select_objs_, other.__dict__.items())))
+    def __ne__(self, other):
+        return not self.__eq__(other)
+    # Django ETL transform hooks.
+    def gds_djo_etl_transform(self):
+        pass
+    def gds_djo_etl_transform_db_obj(self, dbobj):
+        pass
+    # SQLAlchemy ETL transform hooks.
+    def gds_sqa_etl_transform(self):
+        return 0, None
+    def gds_sqa_etl_transform_db_obj(self, dbobj):
+        pass
+    def gds_get_node_lineno_(self):
+        if (hasattr(self, "gds_elementtree_node_") and
+                self.gds_elementtree_node_ is not None):
+            return ' near line {}'.format(
+                self.gds_elementtree_node_.sourceline)
+        else:
+            return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
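+    # Illustrative example (not generated by generateDS): with the escaping
+    # helpers below, quote_xml('AT&T <Box>') returns 'AT&amp;T &lt;Box&gt;',
+    # while any <![CDATA[...]]> section matched by CDATA_pattern_ above is
+    # copied through unchanged.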
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+            optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, POZipCode=None, AllowNonCleansedOriginAddr=None, ToName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToCity=None, ToState=None, ToZip5=None, ToZip4=None, ToPhone=None, POBox=None, ToContactPreference=None, ToContactMessaging=None, ToContactEmail=None, AllowNonCleansedDestAddr=None, WeightInOunces=None, ServiceType=None, Container=None, Width=None, Length=None, Height=None, Girth=None, Machinable=None, ProcessingCategory=None, PriceOptions=None, InsuredAmount=None, AddressServiceRequested=None, ExpressMailOptions=None, ShipDate=None, CustomerRefNo=None, CustomerRefNo2=None, ExtraServices=None, HoldForPickup=None, OpenDistribute=None, PermitNumber=None, PermitZIPCode=None, PermitHolderName=None, CRID=None, MID=None, LogisticsManagerMID=None, VendorCode=None, VendorProductVersionNumber=None, SenderName=None, SenderEMail=None, RecipientName=None, RecipientEMail=None, ReceiptOption=None, ImageType=None, HoldForManifest=None, NineDigitRoutingZip=None, ShipInfo=None, CarrierRelease=None, DropOffTime=None, ReturnCommitments=None, PrintCustomerRefNo=None, Content=None, ActionCode=None, OptOutOfSPE=None, SortationLevel=None, DestinationEntryFacilityType=None, ShippingContents=None, CustomsContentType=None, ContentComments=None, RestrictionType=None, RestrictionComments=None, AESITN=None, ImportersReference=None, ImportersContact=None, ExportersReference=None, ExportersContact=None, InvoiceNumber=None, LicenseNumber=None, CertificateNumber=None, NonDeliveryOption=None, AltReturnAddress1=None, AltReturnAddress2=None, AltReturnAddress3=None, AltReturnAddress4=None, AltReturnAddress5=None, AltReturnAddress6=None, AltReturnCountry=None, LabelImportType=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, ChargebackCode=None, TrackingRetentionPeriod=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + 
self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromName = FromName + self.FromName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.AllowNonCleansedOriginAddr = AllowNonCleansedOriginAddr + self.AllowNonCleansedOriginAddr_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToState = ToState + self.ToState_nsprefix_ = None + self.ToZip5 = ToZip5 + self.ToZip5_nsprefix_ = None + self.ToZip4 = ToZip4 + self.ToZip4_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.POBox = POBox + self.POBox_nsprefix_ = None + self.ToContactPreference = ToContactPreference + self.ToContactPreference_nsprefix_ = None + self.ToContactMessaging = ToContactMessaging + self.ToContactMessaging_nsprefix_ = None + self.ToContactEmail = ToContactEmail + self.ToContactEmail_nsprefix_ = None + self.AllowNonCleansedDestAddr = AllowNonCleansedDestAddr + self.AllowNonCleansedDestAddr_nsprefix_ = None + self.WeightInOunces = WeightInOunces + self.WeightInOunces_nsprefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.ProcessingCategory = ProcessingCategory + self.ProcessingCategory_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.InsuredAmount = InsuredAmount + self.InsuredAmount_nsprefix_ = None + self.AddressServiceRequested = AddressServiceRequested + self.AddressServiceRequested_nsprefix_ = None + self.ExpressMailOptions = ExpressMailOptions + self.ExpressMailOptions_nsprefix_ = None + self.ShipDate = ShipDate + self.validate_ShipDateType(self.ShipDate) + self.ShipDate_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.HoldForPickup = HoldForPickup + self.HoldForPickup_nsprefix_ = None + self.OpenDistribute = OpenDistribute + self.OpenDistribute_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.PermitZIPCode = PermitZIPCode + self.PermitZIPCode_nsprefix_ = None + self.PermitHolderName = PermitHolderName + self.PermitHolderName_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = 
None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.SenderName = SenderName + self.SenderName_nsprefix_ = None + self.SenderEMail = SenderEMail + self.SenderEMail_nsprefix_ = None + self.RecipientName = RecipientName + self.RecipientName_nsprefix_ = None + self.RecipientEMail = RecipientEMail + self.RecipientEMail_nsprefix_ = None + self.ReceiptOption = ReceiptOption + self.ReceiptOption_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.NineDigitRoutingZip = NineDigitRoutingZip + self.NineDigitRoutingZip_nsprefix_ = None + self.ShipInfo = ShipInfo + self.ShipInfo_nsprefix_ = None + self.CarrierRelease = CarrierRelease + self.CarrierRelease_nsprefix_ = None + self.DropOffTime = DropOffTime + self.DropOffTime_nsprefix_ = None + self.ReturnCommitments = ReturnCommitments + self.ReturnCommitments_nsprefix_ = None + self.PrintCustomerRefNo = PrintCustomerRefNo + self.PrintCustomerRefNo_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.SortationLevel = SortationLevel + self.SortationLevel_nsprefix_ = None + self.DestinationEntryFacilityType = DestinationEntryFacilityType + self.DestinationEntryFacilityType_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.CustomsContentType = CustomsContentType + self.CustomsContentType_nsprefix_ = None + self.ContentComments = ContentComments + self.ContentComments_nsprefix_ = None + self.RestrictionType = RestrictionType + self.RestrictionType_nsprefix_ = None + self.RestrictionComments = RestrictionComments + self.RestrictionComments_nsprefix_ = None + self.AESITN = AESITN + self.AESITN_nsprefix_ = None + self.ImportersReference = ImportersReference + self.ImportersReference_nsprefix_ = None + self.ImportersContact = ImportersContact + self.ImportersContact_nsprefix_ = None + self.ExportersReference = ExportersReference + self.ExportersReference_nsprefix_ = None + self.ExportersContact = ExportersContact + self.ExportersContact_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.NonDeliveryOption = NonDeliveryOption + self.NonDeliveryOption_nsprefix_ = None + self.AltReturnAddress1 = AltReturnAddress1 + self.AltReturnAddress1_nsprefix_ = None + self.AltReturnAddress2 = AltReturnAddress2 + self.AltReturnAddress2_nsprefix_ = None + self.AltReturnAddress3 = AltReturnAddress3 + self.AltReturnAddress3_nsprefix_ = None + self.AltReturnAddress4 = AltReturnAddress4 + self.AltReturnAddress4_nsprefix_ = None + self.AltReturnAddress5 = AltReturnAddress5 + self.AltReturnAddress5_nsprefix_ = None + self.AltReturnAddress6 = AltReturnAddress6 + self.AltReturnAddress6_nsprefix_ = None + self.AltReturnCountry = AltReturnCountry + self.AltReturnCountry_nsprefix_ = None + self.LabelImportType = LabelImportType + self.LabelImportType_nsprefix_ = None + 
self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + self.TrackingRetentionPeriod = TrackingRetentionPeriod + self.TrackingRetentionPeriod_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSRequest.subclass: + return eVSRequest.subclass(*args_, **kwargs_) + else: + return eVSRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromName(self): + return self.FromName + def set_FromName(self, FromName): + self.FromName = FromName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_AllowNonCleansedOriginAddr(self): + return self.AllowNonCleansedOriginAddr + def set_AllowNonCleansedOriginAddr(self, AllowNonCleansedOriginAddr): + self.AllowNonCleansedOriginAddr = AllowNonCleansedOriginAddr + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, 
ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToState(self): + return self.ToState + def set_ToState(self, ToState): + self.ToState = ToState + def get_ToZip5(self): + return self.ToZip5 + def set_ToZip5(self, ToZip5): + self.ToZip5 = ToZip5 + def get_ToZip4(self): + return self.ToZip4 + def set_ToZip4(self, ToZip4): + self.ToZip4 = ToZip4 + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_POBox(self): + return self.POBox + def set_POBox(self, POBox): + self.POBox = POBox + def get_ToContactPreference(self): + return self.ToContactPreference + def set_ToContactPreference(self, ToContactPreference): + self.ToContactPreference = ToContactPreference + def get_ToContactMessaging(self): + return self.ToContactMessaging + def set_ToContactMessaging(self, ToContactMessaging): + self.ToContactMessaging = ToContactMessaging + def get_ToContactEmail(self): + return self.ToContactEmail + def set_ToContactEmail(self, ToContactEmail): + self.ToContactEmail = ToContactEmail + def get_AllowNonCleansedDestAddr(self): + return self.AllowNonCleansedDestAddr + def set_AllowNonCleansedDestAddr(self, AllowNonCleansedDestAddr): + self.AllowNonCleansedDestAddr = AllowNonCleansedDestAddr + def get_WeightInOunces(self): + return self.WeightInOunces + def set_WeightInOunces(self, WeightInOunces): + self.WeightInOunces = WeightInOunces + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_ProcessingCategory(self): + return self.ProcessingCategory + def set_ProcessingCategory(self, ProcessingCategory): + self.ProcessingCategory = ProcessingCategory + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_InsuredAmount(self): + return self.InsuredAmount + def set_InsuredAmount(self, InsuredAmount): + self.InsuredAmount = InsuredAmount + def get_AddressServiceRequested(self): + return self.AddressServiceRequested + def set_AddressServiceRequested(self, AddressServiceRequested): + self.AddressServiceRequested = AddressServiceRequested + def get_ExpressMailOptions(self): + return self.ExpressMailOptions + def set_ExpressMailOptions(self, ExpressMailOptions): + self.ExpressMailOptions = ExpressMailOptions + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_ExtraServices(self): + return self.ExtraServices + def 
set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_HoldForPickup(self): + return self.HoldForPickup + def set_HoldForPickup(self, HoldForPickup): + self.HoldForPickup = HoldForPickup + def get_OpenDistribute(self): + return self.OpenDistribute + def set_OpenDistribute(self, OpenDistribute): + self.OpenDistribute = OpenDistribute + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_PermitZIPCode(self): + return self.PermitZIPCode + def set_PermitZIPCode(self, PermitZIPCode): + self.PermitZIPCode = PermitZIPCode + def get_PermitHolderName(self): + return self.PermitHolderName + def set_PermitHolderName(self, PermitHolderName): + self.PermitHolderName = PermitHolderName + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_SenderName(self): + return self.SenderName + def set_SenderName(self, SenderName): + self.SenderName = SenderName + def get_SenderEMail(self): + return self.SenderEMail + def set_SenderEMail(self, SenderEMail): + self.SenderEMail = SenderEMail + def get_RecipientName(self): + return self.RecipientName + def set_RecipientName(self, RecipientName): + self.RecipientName = RecipientName + def get_RecipientEMail(self): + return self.RecipientEMail + def set_RecipientEMail(self, RecipientEMail): + self.RecipientEMail = RecipientEMail + def get_ReceiptOption(self): + return self.ReceiptOption + def set_ReceiptOption(self, ReceiptOption): + self.ReceiptOption = ReceiptOption + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_NineDigitRoutingZip(self): + return self.NineDigitRoutingZip + def set_NineDigitRoutingZip(self, NineDigitRoutingZip): + self.NineDigitRoutingZip = NineDigitRoutingZip + def get_ShipInfo(self): + return self.ShipInfo + def set_ShipInfo(self, ShipInfo): + self.ShipInfo = ShipInfo + def get_CarrierRelease(self): + return self.CarrierRelease + def set_CarrierRelease(self, CarrierRelease): + self.CarrierRelease = CarrierRelease + def get_DropOffTime(self): + return self.DropOffTime + def set_DropOffTime(self, DropOffTime): + self.DropOffTime = DropOffTime + def get_ReturnCommitments(self): + return self.ReturnCommitments + def set_ReturnCommitments(self, ReturnCommitments): + self.ReturnCommitments = ReturnCommitments + def get_PrintCustomerRefNo(self): + return self.PrintCustomerRefNo + def set_PrintCustomerRefNo(self, PrintCustomerRefNo): + self.PrintCustomerRefNo = PrintCustomerRefNo + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, 
ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_SortationLevel(self): + return self.SortationLevel + def set_SortationLevel(self, SortationLevel): + self.SortationLevel = SortationLevel + def get_DestinationEntryFacilityType(self): + return self.DestinationEntryFacilityType + def set_DestinationEntryFacilityType(self, DestinationEntryFacilityType): + self.DestinationEntryFacilityType = DestinationEntryFacilityType + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_CustomsContentType(self): + return self.CustomsContentType + def set_CustomsContentType(self, CustomsContentType): + self.CustomsContentType = CustomsContentType + def get_ContentComments(self): + return self.ContentComments + def set_ContentComments(self, ContentComments): + self.ContentComments = ContentComments + def get_RestrictionType(self): + return self.RestrictionType + def set_RestrictionType(self, RestrictionType): + self.RestrictionType = RestrictionType + def get_RestrictionComments(self): + return self.RestrictionComments + def set_RestrictionComments(self, RestrictionComments): + self.RestrictionComments = RestrictionComments + def get_AESITN(self): + return self.AESITN + def set_AESITN(self, AESITN): + self.AESITN = AESITN + def get_ImportersReference(self): + return self.ImportersReference + def set_ImportersReference(self, ImportersReference): + self.ImportersReference = ImportersReference + def get_ImportersContact(self): + return self.ImportersContact + def set_ImportersContact(self, ImportersContact): + self.ImportersContact = ImportersContact + def get_ExportersReference(self): + return self.ExportersReference + def set_ExportersReference(self, ExportersReference): + self.ExportersReference = ExportersReference + def get_ExportersContact(self): + return self.ExportersContact + def set_ExportersContact(self, ExportersContact): + self.ExportersContact = ExportersContact + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def set_CertificateNumber(self, CertificateNumber): + self.CertificateNumber = CertificateNumber + def get_NonDeliveryOption(self): + return self.NonDeliveryOption + def set_NonDeliveryOption(self, NonDeliveryOption): + self.NonDeliveryOption = NonDeliveryOption + def get_AltReturnAddress1(self): + return self.AltReturnAddress1 + def set_AltReturnAddress1(self, AltReturnAddress1): + self.AltReturnAddress1 = AltReturnAddress1 + def get_AltReturnAddress2(self): + return self.AltReturnAddress2 + def set_AltReturnAddress2(self, AltReturnAddress2): + self.AltReturnAddress2 = AltReturnAddress2 + def get_AltReturnAddress3(self): + return self.AltReturnAddress3 + def set_AltReturnAddress3(self, AltReturnAddress3): + self.AltReturnAddress3 = AltReturnAddress3 + def get_AltReturnAddress4(self): + return self.AltReturnAddress4 + def set_AltReturnAddress4(self, AltReturnAddress4): + self.AltReturnAddress4 = AltReturnAddress4 + def get_AltReturnAddress5(self): + return self.AltReturnAddress5 + def set_AltReturnAddress5(self, AltReturnAddress5): + 
self.AltReturnAddress5 = AltReturnAddress5 + def get_AltReturnAddress6(self): + return self.AltReturnAddress6 + def set_AltReturnAddress6(self, AltReturnAddress6): + self.AltReturnAddress6 = AltReturnAddress6 + def get_AltReturnCountry(self): + return self.AltReturnCountry + def set_AltReturnCountry(self, AltReturnCountry): + self.AltReturnCountry = AltReturnCountry + def get_LabelImportType(self): + return self.LabelImportType + def set_LabelImportType(self, LabelImportType): + self.LabelImportType = LabelImportType + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_TrackingRetentionPeriod(self): + return self.TrackingRetentionPeriod + def set_TrackingRetentionPeriod(self, TrackingRetentionPeriod): + self.TrackingRetentionPeriod = TrackingRetentionPeriod + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def validate_ShipDateType(self, value): + result = True + # Validate type ShipDateType, a restriction on xs:string. 
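+        # Illustrative note (not generated by generateDS): a non-string
+        # value, or a string that does not fully match one of the
+        # month/day/year patterns in validate_ShipDateType_patterns_ below,
+        # only adds a warning to self.gds_collector_ and makes this return
+        # False; no exception is raised.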
+ if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + if not self.gds_validate_simple_patterns( + self.validate_ShipDateType_patterns_, value): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_ShipDateType_patterns_, )) + result = False + return result + validate_ShipDateType_patterns_ = [['^(\\d{1,2}/ \\d{1,2}/ \\d\\d(\\d\\d)?)$']] + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.POZipCode is not None or + self.AllowNonCleansedOriginAddr is not None or + self.ToName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToCity is not None or + self.ToState is not None or + self.ToZip5 is not None or + self.ToZip4 is not None or + self.ToPhone is not None or + self.POBox is not None or + self.ToContactPreference is not None or + self.ToContactMessaging is not None or + self.ToContactEmail is not None or + self.AllowNonCleansedDestAddr is not None or + self.WeightInOunces is not None or + self.ServiceType is not None or + self.Container is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Machinable is not None or + self.ProcessingCategory is not None or + self.PriceOptions is not None or + self.InsuredAmount is not None or + self.AddressServiceRequested is not None or + self.ExpressMailOptions is not None or + self.ShipDate is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.ExtraServices is not None or + self.HoldForPickup is not None or + self.OpenDistribute is not None or + self.PermitNumber is not None or + self.PermitZIPCode is not None or + self.PermitHolderName is not None or + self.CRID is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.SenderName is not None or + self.SenderEMail is not None or + self.RecipientName is not None or + self.RecipientEMail is not None or + self.ReceiptOption is not None or + self.ImageType is not None or + self.HoldForManifest is not None or + self.NineDigitRoutingZip is not None or + self.ShipInfo is not None or + self.CarrierRelease is not None or + self.DropOffTime is not None or + self.ReturnCommitments is not None or + self.PrintCustomerRefNo is not None or + self.Content is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.SortationLevel is not None or + self.DestinationEntryFacilityType is not None or + self.ShippingContents is not None or + self.CustomsContentType is not None or + self.ContentComments is not None or + self.RestrictionType is not None or + self.RestrictionComments is not None or + self.AESITN is not None or + self.ImportersReference is not None or + self.ImportersContact is 
not None or + self.ExportersReference is not None or + self.ExportersContact is not None or + self.InvoiceNumber is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.NonDeliveryOption is not None or + self.AltReturnAddress1 is not None or + self.AltReturnAddress2 is not None or + self.AltReturnAddress3 is not None or + self.AltReturnAddress4 is not None or + self.AltReturnAddress5 is not None or + self.AltReturnAddress6 is not None or + self.AltReturnCountry is not None or + self.LabelImportType is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.ChargebackCode is not None or + self.TrackingRetentionPeriod is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Option, input_name='Option'), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if 
self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromName is not None: + namespaceprefix_ = self.FromName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromName), input_name='FromName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if 
self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.AllowNonCleansedOriginAddr is not None: + namespaceprefix_ = self.AllowNonCleansedOriginAddr_nsprefix_ + ':' if (UseCapturedNS_ and self.AllowNonCleansedOriginAddr_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAllowNonCleansedOriginAddr>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AllowNonCleansedOriginAddr, input_name='AllowNonCleansedOriginAddr'), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToState is not None: + namespaceprefix_ = self.ToState_nsprefix_ + ':' if (UseCapturedNS_ and self.ToState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToState), input_name='ToState')), namespaceprefix_ , eol_)) + if self.ToZip5 is not None: + namespaceprefix_ = self.ToZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToZip5), input_name='ToZip5')), namespaceprefix_ , eol_)) + if self.ToZip4 is not None: + namespaceprefix_ = self.ToZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToZip4), 
input_name='ToZip4')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.POBox is not None: + namespaceprefix_ = self.POBox_nsprefix_ + ':' if (UseCapturedNS_ and self.POBox_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOBox>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POBox), input_name='POBox')), namespaceprefix_ , eol_)) + if self.ToContactPreference is not None: + namespaceprefix_ = self.ToContactPreference_nsprefix_ + ':' if (UseCapturedNS_ and self.ToContactPreference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToContactPreference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToContactPreference), input_name='ToContactPreference')), namespaceprefix_ , eol_)) + if self.ToContactMessaging is not None: + namespaceprefix_ = self.ToContactMessaging_nsprefix_ + ':' if (UseCapturedNS_ and self.ToContactMessaging_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToContactMessaging>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToContactMessaging), input_name='ToContactMessaging')), namespaceprefix_ , eol_)) + if self.ToContactEmail is not None: + namespaceprefix_ = self.ToContactEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToContactEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToContactEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToContactEmail), input_name='ToContactEmail')), namespaceprefix_ , eol_)) + if self.AllowNonCleansedDestAddr is not None: + namespaceprefix_ = self.AllowNonCleansedDestAddr_nsprefix_ + ':' if (UseCapturedNS_ and self.AllowNonCleansedDestAddr_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAllowNonCleansedDestAddr>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AllowNonCleansedDestAddr, input_name='AllowNonCleansedDestAddr'), namespaceprefix_ , eol_)) + if self.WeightInOunces is not None: + namespaceprefix_ = self.WeightInOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInOunces>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.WeightInOunces), input_name='WeightInOunces')), namespaceprefix_ , eol_)) + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Width is not None: + 
namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Machinable), input_name='Machinable')), namespaceprefix_ , eol_)) + if self.ProcessingCategory is not None: + namespaceprefix_ = self.ProcessingCategory_nsprefix_ + ':' if (UseCapturedNS_ and self.ProcessingCategory_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProcessingCategory>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ProcessingCategory), input_name='ProcessingCategory')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.InsuredAmount is not None: + namespaceprefix_ = self.InsuredAmount_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredAmount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredAmount>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.InsuredAmount, input_name='InsuredAmount'), namespaceprefix_ , eol_)) + if self.AddressServiceRequested is not None: + namespaceprefix_ = self.AddressServiceRequested_nsprefix_ + ':' if (UseCapturedNS_ and self.AddressServiceRequested_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddressServiceRequested>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AddressServiceRequested, input_name='AddressServiceRequested'), namespaceprefix_ , eol_)) + if self.ExpressMailOptions is not None: + namespaceprefix_ = self.ExpressMailOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpressMailOptions_nsprefix_) else '' + self.ExpressMailOptions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExpressMailOptions', pretty_print=pretty_print) + if self.ShipDate is not None: + namespaceprefix_ = self.ShipDate_nsprefix_ + ':' if 
(UseCapturedNS_ and self.ShipDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ShipDate), input_name='ShipDate')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.HoldForPickup is not None: + namespaceprefix_ = self.HoldForPickup_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForPickup_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForPickup>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForPickup), input_name='HoldForPickup')), namespaceprefix_ , eol_)) + if self.OpenDistribute is not None: + namespaceprefix_ = self.OpenDistribute_nsprefix_ + ':' if (UseCapturedNS_ and self.OpenDistribute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOpenDistribute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OpenDistribute), input_name='OpenDistribute')), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.PermitZIPCode is not None: + namespaceprefix_ = self.PermitZIPCode_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitZIPCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitZIPCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitZIPCode), input_name='PermitZIPCode')), namespaceprefix_ , eol_)) + if self.PermitHolderName is not None: + namespaceprefix_ = self.PermitHolderName_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitHolderName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitHolderName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitHolderName), input_name='PermitHolderName')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.SenderName is not None: + namespaceprefix_ = self.SenderName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderName), input_name='SenderName')), namespaceprefix_ , eol_)) + if self.SenderEMail is not None: + namespaceprefix_ = self.SenderEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEMail), input_name='SenderEMail')), namespaceprefix_ , eol_)) + if self.RecipientName is not None: + namespaceprefix_ = self.RecipientName_nsprefix_ + ':' if (UseCapturedNS_ and self.RecipientName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRecipientName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RecipientName), input_name='RecipientName')), namespaceprefix_ , eol_)) + if self.RecipientEMail is not None: + namespaceprefix_ = self.RecipientEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.RecipientEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRecipientEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RecipientEMail), input_name='RecipientEMail')), namespaceprefix_ , eol_)) + if self.ReceiptOption is not None: + namespaceprefix_ = self.ReceiptOption_nsprefix_ + ':' if (UseCapturedNS_ and self.ReceiptOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReceiptOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReceiptOption), 
input_name='ReceiptOption')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.NineDigitRoutingZip is not None: + namespaceprefix_ = self.NineDigitRoutingZip_nsprefix_ + ':' if (UseCapturedNS_ and self.NineDigitRoutingZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNineDigitRoutingZip>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.NineDigitRoutingZip, input_name='NineDigitRoutingZip'), namespaceprefix_ , eol_)) + if self.ShipInfo is not None: + namespaceprefix_ = self.ShipInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipInfo>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ShipInfo, input_name='ShipInfo'), namespaceprefix_ , eol_)) + if self.CarrierRelease is not None: + namespaceprefix_ = self.CarrierRelease_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRelease_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRelease>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.CarrierRelease, input_name='CarrierRelease'), namespaceprefix_ , eol_)) + if self.DropOffTime is not None: + namespaceprefix_ = self.DropOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.DropOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDropOffTime>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DropOffTime, input_name='DropOffTime'), namespaceprefix_ , eol_)) + if self.ReturnCommitments is not None: + namespaceprefix_ = self.ReturnCommitments_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCommitments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnCommitments>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ReturnCommitments, input_name='ReturnCommitments'), namespaceprefix_ , eol_)) + if self.PrintCustomerRefNo is not None: + namespaceprefix_ = self.PrintCustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.PrintCustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrintCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PrintCustomerRefNo), input_name='PrintCustomerRefNo')), namespaceprefix_ , eol_)) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OptOutOfSPE), input_name='OptOutOfSPE')), namespaceprefix_ , eol_)) + if self.SortationLevel is not None: + namespaceprefix_ = self.SortationLevel_nsprefix_ + ':' if (UseCapturedNS_ and self.SortationLevel_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSortationLevel>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SortationLevel), input_name='SortationLevel')), namespaceprefix_ , eol_)) + if self.DestinationEntryFacilityType is not None: + namespaceprefix_ = self.DestinationEntryFacilityType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationEntryFacilityType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationEntryFacilityType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationEntryFacilityType), input_name='DestinationEntryFacilityType')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.CustomsContentType is not None: + namespaceprefix_ = self.CustomsContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomsContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomsContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomsContentType), input_name='CustomsContentType')), namespaceprefix_ , eol_)) + if self.ContentComments is not None: + namespaceprefix_ = self.ContentComments_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentComments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentComments), input_name='ContentComments')), namespaceprefix_ , eol_)) + if self.RestrictionType is not None: + namespaceprefix_ = self.RestrictionType_nsprefix_ + ':' if (UseCapturedNS_ and self.RestrictionType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictionType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RestrictionType), input_name='RestrictionType')), namespaceprefix_ , eol_)) + if self.RestrictionComments is not None: + namespaceprefix_ = self.RestrictionComments_nsprefix_ + ':' if (UseCapturedNS_ and self.RestrictionComments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictionComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RestrictionComments), input_name='RestrictionComments')), namespaceprefix_ , eol_)) + if self.AESITN is not None: + namespaceprefix_ = self.AESITN_nsprefix_ + ':' if (UseCapturedNS_ and self.AESITN_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAESITN>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.AESITN), input_name='AESITN')), namespaceprefix_ , eol_)) + if self.ImportersReference is not None: + namespaceprefix_ = self.ImportersReference_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReference), input_name='ImportersReference')), namespaceprefix_ , eol_)) + if self.ImportersContact is not None: + namespaceprefix_ = self.ImportersContact_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersContact_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersContact>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersContact), input_name='ImportersContact')), namespaceprefix_ , eol_)) + if self.ExportersReference is not None: + namespaceprefix_ = self.ExportersReference_nsprefix_ + ':' if (UseCapturedNS_ and self.ExportersReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExportersReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExportersReference), input_name='ExportersReference')), namespaceprefix_ , eol_)) + if self.ExportersContact is not None: + namespaceprefix_ = self.ExportersContact_nsprefix_ + ':' if (UseCapturedNS_ and self.ExportersContact_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExportersContact>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExportersContact), input_name='ExportersContact')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.NonDeliveryOption is not None: + namespaceprefix_ = self.NonDeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonDeliveryOption), input_name='NonDeliveryOption')), namespaceprefix_ , eol_)) + if self.AltReturnAddress1 is not None: + namespaceprefix_ = self.AltReturnAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sAltReturnAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress1), input_name='AltReturnAddress1')), namespaceprefix_ , eol_)) + if self.AltReturnAddress2 is not None: + namespaceprefix_ = self.AltReturnAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress2), input_name='AltReturnAddress2')), namespaceprefix_ , eol_)) + if self.AltReturnAddress3 is not None: + namespaceprefix_ = self.AltReturnAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress3), input_name='AltReturnAddress3')), namespaceprefix_ , eol_)) + if self.AltReturnAddress4 is not None: + namespaceprefix_ = self.AltReturnAddress4_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress4), input_name='AltReturnAddress4')), namespaceprefix_ , eol_)) + if self.AltReturnAddress5 is not None: + namespaceprefix_ = self.AltReturnAddress5_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress5), input_name='AltReturnAddress5')), namespaceprefix_ , eol_)) + if self.AltReturnAddress6 is not None: + namespaceprefix_ = self.AltReturnAddress6_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress6_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress6>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress6), input_name='AltReturnAddress6')), namespaceprefix_ , eol_)) + if self.AltReturnCountry is not None: + namespaceprefix_ = self.AltReturnCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnCountry), input_name='AltReturnCountry')), namespaceprefix_ , eol_)) + if self.LabelImportType is not None: + namespaceprefix_ = self.LabelImportType_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImportType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImportType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImportType), input_name='LabelImportType')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if self.SenderFirstName is not None: + namespaceprefix_ = 
self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + if self.TrackingRetentionPeriod is not None: + namespaceprefix_ = self.TrackingRetentionPeriod_nsprefix_ + ':' if (UseCapturedNS_ and 
self.TrackingRetentionPeriod_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTrackingRetentionPeriod>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackingRetentionPeriod), input_name='TrackingRetentionPeriod')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Option') + ival_ = self.gds_validate_integer(ival_, node, 'Option') + self.Option = ival_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromName') + value_ = self.gds_validate_string(value_, node, 'FromName') + self.FromName = value_ + self.FromName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'AllowNonCleansedOriginAddr': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AllowNonCleansedOriginAddr') + ival_ = self.gds_validate_boolean(ival_, node, 'AllowNonCleansedOriginAddr') + self.AllowNonCleansedOriginAddr = ival_ + self.AllowNonCleansedOriginAddr_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToState') + value_ = self.gds_validate_string(value_, node, 'ToState') + self.ToState = value_ + self.ToState_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToZip5') + value_ = self.gds_validate_string(value_, node, 'ToZip5') + self.ToZip5 = value_ + self.ToZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToZip4') + value_ = self.gds_validate_string(value_, node, 'ToZip4') + self.ToZip4 = value_ + self.ToZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'POBox': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POBox') + value_ = self.gds_validate_string(value_, node, 'POBox') + self.POBox = value_ + 
self.POBox_nsprefix_ = child_.prefix + elif nodeName_ == 'ToContactPreference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToContactPreference') + value_ = self.gds_validate_string(value_, node, 'ToContactPreference') + self.ToContactPreference = value_ + self.ToContactPreference_nsprefix_ = child_.prefix + elif nodeName_ == 'ToContactMessaging': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToContactMessaging') + value_ = self.gds_validate_string(value_, node, 'ToContactMessaging') + self.ToContactMessaging = value_ + self.ToContactMessaging_nsprefix_ = child_.prefix + elif nodeName_ == 'ToContactEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToContactEmail') + value_ = self.gds_validate_string(value_, node, 'ToContactEmail') + self.ToContactEmail = value_ + self.ToContactEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'AllowNonCleansedDestAddr': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AllowNonCleansedDestAddr') + ival_ = self.gds_validate_boolean(ival_, node, 'AllowNonCleansedDestAddr') + self.AllowNonCleansedDestAddr = ival_ + self.AllowNonCleansedDestAddr_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInOunces': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'WeightInOunces') + value_ = self.gds_validate_string(value_, node, 'WeightInOunces') + self.WeightInOunces = value_ + self.WeightInOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Machinable') + value_ = self.gds_validate_string(value_, node, 'Machinable') + self.Machinable = value_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'ProcessingCategory': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ProcessingCategory') + value_ = self.gds_validate_string(value_, node, 'ProcessingCategory') + self.ProcessingCategory = value_ + self.ProcessingCategory_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredAmount' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'InsuredAmount') + fval_ = self.gds_validate_decimal(fval_, node, 'InsuredAmount') + self.InsuredAmount = fval_ + self.InsuredAmount_nsprefix_ = child_.prefix + elif nodeName_ == 'AddressServiceRequested': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AddressServiceRequested') + ival_ = self.gds_validate_boolean(ival_, node, 'AddressServiceRequested') + self.AddressServiceRequested = ival_ + self.AddressServiceRequested_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpressMailOptions': + obj_ = ExpressMailOptionsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExpressMailOptions = obj_ + obj_.original_tagname_ = 'ExpressMailOptions' + elif nodeName_ == 'ShipDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ShipDate') + value_ = self.gds_validate_string(value_, node, 'ShipDate') + self.ShipDate = value_ + self.ShipDate_nsprefix_ = child_.prefix + # validate type ShipDateType + self.validate_ShipDateType(self.ShipDate) + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'HoldForPickup': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForPickup') + value_ = self.gds_validate_string(value_, node, 'HoldForPickup') + self.HoldForPickup = value_ + self.HoldForPickup_nsprefix_ = child_.prefix + elif nodeName_ == 'OpenDistribute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OpenDistribute') + value_ = self.gds_validate_string(value_, node, 'OpenDistribute') + self.OpenDistribute = value_ + self.OpenDistribute_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitZIPCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitZIPCode') + value_ = self.gds_validate_string(value_, node, 'PermitZIPCode') + self.PermitZIPCode = value_ + self.PermitZIPCode_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitHolderName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitHolderName') + value_ = self.gds_validate_string(value_, node, 'PermitHolderName') + self.PermitHolderName = value_ + self.PermitHolderName_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderName') + value_ = self.gds_validate_string(value_, node, 'SenderName') + self.SenderName = value_ + self.SenderName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEMail') + value_ = self.gds_validate_string(value_, node, 'SenderEMail') + self.SenderEMail = value_ + self.SenderEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'RecipientName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RecipientName') + value_ = self.gds_validate_string(value_, node, 'RecipientName') + self.RecipientName = value_ + self.RecipientName_nsprefix_ = child_.prefix + elif nodeName_ == 'RecipientEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RecipientEMail') + value_ = self.gds_validate_string(value_, node, 'RecipientEMail') + self.RecipientEMail = value_ + self.RecipientEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'ReceiptOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReceiptOption') + value_ = self.gds_validate_string(value_, node, 'ReceiptOption') + self.ReceiptOption = value_ + self.ReceiptOption_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'NineDigitRoutingZip': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'NineDigitRoutingZip') + ival_ = self.gds_validate_boolean(ival_, node, 'NineDigitRoutingZip') + self.NineDigitRoutingZip = ival_ + self.NineDigitRoutingZip_nsprefix_ = child_.prefix + elif nodeName_ == 'ShipInfo': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ShipInfo') + ival_ = 
self.gds_validate_boolean(ival_, node, 'ShipInfo') + self.ShipInfo = ival_ + self.ShipInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRelease': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'CarrierRelease') + ival_ = self.gds_validate_boolean(ival_, node, 'CarrierRelease') + self.CarrierRelease = ival_ + self.CarrierRelease_nsprefix_ = child_.prefix + elif nodeName_ == 'DropOffTime': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DropOffTime') + ival_ = self.gds_validate_boolean(ival_, node, 'DropOffTime') + self.DropOffTime = ival_ + self.DropOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnCommitments': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ReturnCommitments') + ival_ = self.gds_validate_boolean(ival_, node, 'ReturnCommitments') + self.ReturnCommitments = ival_ + self.ReturnCommitments_nsprefix_ = child_.prefix + elif nodeName_ == 'PrintCustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PrintCustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'PrintCustomerRefNo') + self.PrintCustomerRefNo = value_ + self.PrintCustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + obj_.original_tagname_ = 'Content' + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OptOutOfSPE') + value_ = self.gds_validate_string(value_, node, 'OptOutOfSPE') + self.OptOutOfSPE = value_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'SortationLevel': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SortationLevel') + value_ = self.gds_validate_string(value_, node, 'SortationLevel') + self.SortationLevel = value_ + self.SortationLevel_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationEntryFacilityType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationEntryFacilityType') + value_ = self.gds_validate_string(value_, node, 'DestinationEntryFacilityType') + self.DestinationEntryFacilityType = value_ + self.DestinationEntryFacilityType_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'CustomsContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomsContentType') + value_ = self.gds_validate_string(value_, node, 'CustomsContentType') + self.CustomsContentType = value_ + self.CustomsContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentComments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentComments') + value_ = self.gds_validate_string(value_, node, 'ContentComments') + self.ContentComments = value_ + self.ContentComments_nsprefix_ = child_.prefix + elif nodeName_ == 'RestrictionType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RestrictionType') + value_ = self.gds_validate_string(value_, node, 'RestrictionType') + self.RestrictionType 
= value_ + self.RestrictionType_nsprefix_ = child_.prefix + elif nodeName_ == 'RestrictionComments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RestrictionComments') + value_ = self.gds_validate_string(value_, node, 'RestrictionComments') + self.RestrictionComments = value_ + self.RestrictionComments_nsprefix_ = child_.prefix + elif nodeName_ == 'AESITN': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AESITN') + value_ = self.gds_validate_string(value_, node, 'AESITN') + self.AESITN = value_ + self.AESITN_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReference') + value_ = self.gds_validate_string(value_, node, 'ImportersReference') + self.ImportersReference = value_ + self.ImportersReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersContact': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersContact') + value_ = self.gds_validate_string(value_, node, 'ImportersContact') + self.ImportersContact = value_ + self.ImportersContact_nsprefix_ = child_.prefix + elif nodeName_ == 'ExportersReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExportersReference') + value_ = self.gds_validate_string(value_, node, 'ExportersReference') + self.ExportersReference = value_ + self.ExportersReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ExportersContact': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExportersContact') + value_ = self.gds_validate_string(value_, node, 'ExportersContact') + self.ExportersContact = value_ + self.ExportersContact_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonDeliveryOption') + value_ = self.gds_validate_string(value_, node, 'NonDeliveryOption') + self.NonDeliveryOption = value_ + self.NonDeliveryOption_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress1') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress1') + self.AltReturnAddress1 = value_ + self.AltReturnAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress2') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress2') + self.AltReturnAddress2 = value_ + self.AltReturnAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'AltReturnAddress3') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress3') + self.AltReturnAddress3 = value_ + self.AltReturnAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress4') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress4') + self.AltReturnAddress4 = value_ + self.AltReturnAddress4_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress5') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress5') + self.AltReturnAddress5 = value_ + self.AltReturnAddress5_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress6': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress6') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress6') + self.AltReturnAddress6 = value_ + self.AltReturnAddress6_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnCountry') + value_ = self.gds_validate_string(value_, node, 'AltReturnCountry') + self.AltReturnCountry = value_ + self.AltReturnCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImportType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImportType') + value_ = self.gds_validate_string(value_, node, 'LabelImportType') + self.LabelImportType = value_ + self.LabelImportType_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif 
nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ChargebackCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ChargebackCode') + value_ = self.gds_validate_string(value_, node, 'ChargebackCode') + self.ChargebackCode = value_ + self.ChargebackCode_nsprefix_ = child_.prefix + elif nodeName_ == 'TrackingRetentionPeriod': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackingRetentionPeriod') + value_ = self.gds_validate_string(value_, node, 'TrackingRetentionPeriod') + self.TrackingRetentionPeriod = value_ + self.TrackingRetentionPeriod_nsprefix_ = child_.prefix +# end class eVSRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, XCoordinate=None, YCoordinate=None, LabelSequence=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + self.XCoordinate = XCoordinate + self.XCoordinate_nsprefix_ = None + self.YCoordinate = YCoordinate + self.YCoordinate_nsprefix_ = None + self.LabelSequence = LabelSequence + self.LabelSequence_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def get_XCoordinate(self): + return self.XCoordinate + def set_XCoordinate(self, XCoordinate): + self.XCoordinate = XCoordinate + def get_YCoordinate(self): + return self.YCoordinate + def set_YCoordinate(self, YCoordinate): + self.YCoordinate = YCoordinate + def get_LabelSequence(self): + return self.LabelSequence + def set_LabelSequence(self, LabelSequence): + self.LabelSequence = LabelSequence + def has__content(self): + if ( + self.ImageParameter is not None or + self.XCoordinate is not None or + self.YCoordinate is not None or + self.LabelSequence is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + if self.XCoordinate is not None: + namespaceprefix_ = self.XCoordinate_nsprefix_ + ':' if (UseCapturedNS_ and self.XCoordinate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sXCoordinate>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.XCoordinate, input_name='XCoordinate'), namespaceprefix_ , eol_)) + if self.YCoordinate is not None: + namespaceprefix_ = self.YCoordinate_nsprefix_ + ':' if (UseCapturedNS_ and self.YCoordinate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sYCoordinate>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.YCoordinate, input_name='YCoordinate'), namespaceprefix_ , eol_)) + if self.LabelSequence is not None: + namespaceprefix_ = self.LabelSequence_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelSequence_nsprefix_) else '' + self.LabelSequence.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LabelSequence', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix + elif nodeName_ == 'XCoordinate' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'XCoordinate') + ival_ = self.gds_validate_integer(ival_, node, 'XCoordinate') + self.XCoordinate = ival_ + self.XCoordinate_nsprefix_ = 
child_.prefix + elif nodeName_ == 'YCoordinate' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'YCoordinate') + ival_ = self.gds_validate_integer(ival_, node, 'YCoordinate') + self.YCoordinate = ival_ + self.YCoordinate_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelSequence': + obj_ = LabelSequenceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.LabelSequence = obj_ + obj_.original_tagname_ = 'LabelSequence' +# end class ImageParametersType + + +class LabelSequenceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PackageNumber=None, TotalPackages=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.PackageNumber = PackageNumber + self.PackageNumber_nsprefix_ = None + self.TotalPackages = TotalPackages + self.TotalPackages_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, LabelSequenceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LabelSequenceType.subclass: + return LabelSequenceType.subclass(*args_, **kwargs_) + else: + return LabelSequenceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PackageNumber(self): + return self.PackageNumber + def set_PackageNumber(self, PackageNumber): + self.PackageNumber = PackageNumber + def get_TotalPackages(self): + return self.TotalPackages + def set_TotalPackages(self, TotalPackages): + self.TotalPackages = TotalPackages + def has__content(self): + if ( + self.PackageNumber is not None or + self.TotalPackages is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LabelSequenceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('LabelSequenceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'LabelSequenceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LabelSequenceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LabelSequenceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LabelSequenceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LabelSequenceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PackageNumber is not None: + namespaceprefix_ = 
self.PackageNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageNumber>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PackageNumber, input_name='PackageNumber'), namespaceprefix_ , eol_)) + if self.TotalPackages is not None: + namespaceprefix_ = self.TotalPackages_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalPackages_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotalPackages>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TotalPackages, input_name='TotalPackages'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PackageNumber' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PackageNumber') + ival_ = self.gds_validate_integer(ival_, node, 'PackageNumber') + self.PackageNumber = ival_ + self.PackageNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'TotalPackages' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'TotalPackages') + ival_ = self.gds_validate_integer(ival_, node, 'TotalPackages') + self.TotalPackages = ival_ + self.TotalPackages_nsprefix_ = child_.prefix +# end class LabelSequenceType + + +class ExpressMailOptionsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, DeliveryOption=None, WaiverOfSignature=None, eSOFAllowed=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.DeliveryOption = DeliveryOption + self.DeliveryOption_nsprefix_ = None + self.WaiverOfSignature = WaiverOfSignature + self.WaiverOfSignature_nsprefix_ = None + self.eSOFAllowed = eSOFAllowed + self.eSOFAllowed_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExpressMailOptionsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExpressMailOptionsType.subclass: + return ExpressMailOptionsType.subclass(*args_, **kwargs_) + else: + return ExpressMailOptionsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_DeliveryOption(self): + return self.DeliveryOption + def set_DeliveryOption(self, DeliveryOption): + self.DeliveryOption = DeliveryOption + def get_WaiverOfSignature(self): + return self.WaiverOfSignature + def set_WaiverOfSignature(self, WaiverOfSignature): + self.WaiverOfSignature = WaiverOfSignature + def get_eSOFAllowed(self): + return self.eSOFAllowed + def set_eSOFAllowed(self, eSOFAllowed): + self.eSOFAllowed = eSOFAllowed + def has__content(self): + if ( + self.DeliveryOption is 
not None or + self.WaiverOfSignature is not None or + self.eSOFAllowed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailOptionsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpressMailOptionsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpressMailOptionsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpressMailOptionsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpressMailOptionsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpressMailOptionsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailOptionsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.DeliveryOption is not None: + namespaceprefix_ = self.DeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryOption), input_name='DeliveryOption')), namespaceprefix_ , eol_)) + if self.WaiverOfSignature is not None: + namespaceprefix_ = self.WaiverOfSignature_nsprefix_ + ':' if (UseCapturedNS_ and self.WaiverOfSignature_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWaiverOfSignature>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.WaiverOfSignature, input_name='WaiverOfSignature'), namespaceprefix_ , eol_)) + if self.eSOFAllowed is not None: + namespaceprefix_ = self.eSOFAllowed_nsprefix_ + ':' if (UseCapturedNS_ and self.eSOFAllowed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%seSOFAllowed>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.eSOFAllowed, input_name='eSOFAllowed'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'DeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryOption') + value_ = self.gds_validate_string(value_, node, 'DeliveryOption') + self.DeliveryOption = value_ + 
self.DeliveryOption_nsprefix_ = child_.prefix + elif nodeName_ == 'WaiverOfSignature': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'WaiverOfSignature') + ival_ = self.gds_validate_boolean(ival_, node, 'WaiverOfSignature') + self.WaiverOfSignature = ival_ + self.WaiverOfSignature_nsprefix_ = child_.prefix + elif nodeName_ == 'eSOFAllowed': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'eSOFAllowed') + ival_ = self.gds_validate_boolean(ival_, node, 'eSOFAllowed') + self.eSOFAllowed = ival_ + self.eSOFAllowed_nsprefix_ = child_.prefix +# end class ExpressMailOptionsType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + 
else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ExtraService_), input_name='ExtraService')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExtraService') + value_ = self.gds_validate_string(value_, node, 'ExtraService') + self.ExtraService.append(value_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentDescription') + value_ = self.gds_validate_string(value_, node, 'ContentDescription') + self.ContentDescription = value_ + self.ContentDescription_nsprefix_ = child_.prefix +# end class ContentType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def 
get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + 
self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', 
fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NetPounds), input_name='NetPounds')), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, 
node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Value') + fval_ = self.gds_validate_float(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NetPounds') + value_ = self.gds_validate_string(value_, node, 'NetPounds') + self.NetPounds = value_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetOunces') + fval_ = self.gds_validate_float(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_request import *\n\n') + sys.stdout.write('import evs_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "ExpressMailOptionsType", + "ExtraServicesType", + "ImageParametersType", + "ItemDetailType", + "LabelSequenceType", + "ShippingContentsType", + "eVSRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evs_response.py b/modules/connectors/usps/karrio/schemas/usps/evs_response.py new file mode 100644 index 0000000000..2083b045f3 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evs_response.py @@ -0,0 +1,1979 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:48 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evs_response.py') +# +# Command line arguments: +# ./schemas/eVSResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evs_response.py" ./schemas/eVSResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". 
See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values 
= input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
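+            # Illustrative note (example values are not from the USPS schema):
+            # with patterns = [['[0-9]{5}']], a target of "20770" fully matches
+            # the pattern, so the method returns True, while "2077A" does not.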
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
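+            # gds_build_any serializes the node back to a unicode XML string;
+            # generateDS calls it for xs:any / anyType members (none are
+            # defined in this eVSResponse schema).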
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSResponse(GeneratedsSuper): + """LabelImage -- over 115000 suppressed + + """ + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, BarcodeNumber=None, LabelImage=None, ToName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToCity=None, ToState=None, ToZip5=None, ToZip4=None, Postnet=None, RDC=None, Postage=None, ExtraServices=None, Zone=None, CarrierRoute=None, PermitHolderName=None, InductionType=None, LogMessage=None, Commitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToState = ToState + self.ToState_nsprefix_ = None + self.ToZip5 = ToZip5 + self.ToZip5_nsprefix_ = None + self.ToZip4 = ToZip4 + self.ToZip4_nsprefix_ = None + self.Postnet = Postnet + self.Postnet_nsprefix_ = None + self.RDC = RDC + self.RDC_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.Zone = Zone + self.Zone_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + self.PermitHolderName = PermitHolderName + self.PermitHolderName_nsprefix_ = None + self.InductionType = InductionType + self.InductionType_nsprefix_ = None + self.LogMessage = LogMessage + self.LogMessage_nsprefix_ = None + self.Commitment = Commitment + self.Commitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSResponse.subclass: + return eVSResponse.subclass(*args_, **kwargs_) + else: + return eVSResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return 
self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToState(self): + return self.ToState + def set_ToState(self, ToState): + self.ToState = ToState + def get_ToZip5(self): + return self.ToZip5 + def set_ToZip5(self, ToZip5): + self.ToZip5 = ToZip5 + def get_ToZip4(self): + return self.ToZip4 + def set_ToZip4(self, ToZip4): + self.ToZip4 = ToZip4 + def get_Postnet(self): + return self.Postnet + def set_Postnet(self, Postnet): + self.Postnet = Postnet + def get_RDC(self): + return self.RDC + def set_RDC(self, RDC): + self.RDC = RDC + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_Zone(self): + return self.Zone + def set_Zone(self, Zone): + self.Zone = Zone + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def get_PermitHolderName(self): + return self.PermitHolderName + def set_PermitHolderName(self, PermitHolderName): + self.PermitHolderName = PermitHolderName + def get_InductionType(self): + return self.InductionType + def set_InductionType(self, InductionType): + self.InductionType = InductionType + def get_LogMessage(self): + return self.LogMessage + def set_LogMessage(self, LogMessage): + self.LogMessage = LogMessage + def get_Commitment(self): + return self.Commitment + def set_Commitment(self, Commitment): + self.Commitment = Commitment + def has__content(self): + if ( + self.BarcodeNumber is not None or + self.LabelImage is not None or + self.ToName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToCity is not None or + self.ToState is not None or + self.ToZip5 is not None or + self.ToZip4 is not None or + self.Postnet is not None or + self.RDC is not None or + self.Postage is not None or + self.ExtraServices is not None or + self.Zone is not None or + self.CarrierRoute is not None or + self.PermitHolderName is not None or + self.InductionType is not None or + self.LogMessage is not None or + self.Commitment is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, 
level, already_processed, namespaceprefix_, name_='eVSResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToState is not None: + namespaceprefix_ = self.ToState_nsprefix_ + ':' if (UseCapturedNS_ and self.ToState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToState), input_name='ToState')), 
namespaceprefix_ , eol_)) + if self.ToZip5 is not None: + namespaceprefix_ = self.ToZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ToZip5, input_name='ToZip5'), namespaceprefix_ , eol_)) + if self.ToZip4 is not None: + namespaceprefix_ = self.ToZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ToZip4, input_name='ToZip4'), namespaceprefix_ , eol_)) + if self.Postnet is not None: + namespaceprefix_ = self.Postnet_nsprefix_ + ':' if (UseCapturedNS_ and self.Postnet_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostnet>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Postnet, input_name='Postnet'), namespaceprefix_ , eol_)) + if self.RDC is not None: + namespaceprefix_ = self.RDC_nsprefix_ + ':' if (UseCapturedNS_ and self.RDC_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRDC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RDC), input_name='RDC')), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.Zone is not None: + namespaceprefix_ = self.Zone_nsprefix_ + ':' if (UseCapturedNS_ and self.Zone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zone), input_name='Zone')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + if self.PermitHolderName is not None: + namespaceprefix_ = self.PermitHolderName_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitHolderName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitHolderName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitHolderName), input_name='PermitHolderName')), namespaceprefix_ , eol_)) + if self.InductionType is not None: + namespaceprefix_ = self.InductionType_nsprefix_ + ':' if (UseCapturedNS_ and self.InductionType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInductionType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InductionType), input_name='InductionType')), namespaceprefix_ , eol_)) + if self.LogMessage is not None: + namespaceprefix_ = self.LogMessage_nsprefix_ + ':' if (UseCapturedNS_ and self.LogMessage_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sLogMessage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogMessage), input_name='LogMessage')), namespaceprefix_ , eol_)) + if self.Commitment is not None: + namespaceprefix_ = self.Commitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Commitment_nsprefix_) else '' + self.Commitment.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Commitment', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToState') + value_ = self.gds_validate_string(value_, node, 'ToState') + self.ToState = value_ + self.ToState_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ToZip5') + ival_ = self.gds_validate_integer(ival_, node, 'ToZip5') + self.ToZip5 = ival_ + self.ToZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ToZip4') + ival_ = self.gds_validate_integer(ival_, node, 'ToZip4') + self.ToZip4 = ival_ + self.ToZip4_nsprefix_ = child_.prefix + elif 
nodeName_ == 'Postnet' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Postnet') + ival_ = self.gds_validate_integer(ival_, node, 'Postnet') + self.Postnet = ival_ + self.Postnet_nsprefix_ = child_.prefix + elif nodeName_ == 'RDC': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RDC') + value_ = self.gds_validate_string(value_, node, 'RDC') + self.RDC = value_ + self.RDC_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'Zone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zone') + value_ = self.gds_validate_string(value_, node, 'Zone') + self.Zone = value_ + self.Zone_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitHolderName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitHolderName') + value_ = self.gds_validate_string(value_, node, 'PermitHolderName') + self.PermitHolderName = value_ + self.PermitHolderName_nsprefix_ = child_.prefix + elif nodeName_ == 'InductionType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InductionType') + value_ = self.gds_validate_string(value_, node, 'InductionType') + self.InductionType = value_ + self.InductionType_nsprefix_ = child_.prefix + elif nodeName_ == 'LogMessage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogMessage') + value_ = self.gds_validate_string(value_, node, 'LogMessage') + self.LogMessage = value_ + self.LogMessage_nsprefix_ = child_.prefix + elif nodeName_ == 'Commitment': + obj_ = CommitmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Commitment = obj_ + obj_.original_tagname_ = 'Commitment' +# end class eVSResponse + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def 
set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + obj_ = ExtraServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraService.append(obj_) + obj_.original_tagname_ = 'ExtraService' +# end class ExtraServicesType + + +class ExtraServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Price=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Price = Price + 
self.Price_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServiceType.subclass: + return ExtraServiceType.subclass(*args_, **kwargs_) + else: + return ExtraServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Price is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + 
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ServiceID') + ival_ = self.gds_validate_integer(ival_, node, 'ServiceID') + self.ServiceID = ival_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +class CommitmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CommitmentName=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommitmentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CommitmentType.subclass: + return CommitmentType.subclass(*args_, **kwargs_) + else: + return CommitmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName = CommitmentName + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.CommitmentName is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommitmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CommitmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or 
'', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommitmentType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommitmentType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommitmentType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.CommitmentName is not None:
+            namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCommitmentName>%s</%sCommitmentName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_))
+        if self.ScheduledDeliveryDate is not None:
+            namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sScheduledDeliveryDate>%s</%sScheduledDeliveryDate>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'CommitmentName':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'CommitmentName')
+            value_ = self.gds_validate_string(value_, node, 'CommitmentName')
+            self.CommitmentName = value_
+            self.CommitmentName_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ScheduledDeliveryDate':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate')
+            value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate')
+            self.ScheduledDeliveryDate = value_
+            self.ScheduledDeliveryDate_nsprefix_ = child_.prefix
+# end class CommitmentType
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
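+
+    Usage sketch (illustrative only; xml_payload is a placeholder for an
+    eVSResponse XML document held as a string):
+        rootObj = parseString(xml_payload, silence=True, print_warnings=False)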
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_response import *\n\n') + sys.stdout.write('import evs_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CommitmentType", + "ExtraServiceType", + "ExtraServicesType", + "eVSResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evsi_cancel_request.py b/modules/connectors/usps/karrio/schemas/usps/evsi_cancel_request.py new file mode 100644 index 0000000000..4abbf76c00 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evsi_cancel_request.py @@ -0,0 +1,1335 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:49 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evsi_cancel_request.py') +# +# Command line arguments: +# ./schemas/eVSICancelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evsi_cancel_request.py" ./schemas/eVSICancelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
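+#
+# Usage sketch for the request type defined further below (illustrative
+# only; the module path is assumed from the file location, and the
+# credential and barcode values are placeholders):
+#
+#     import sys
+#     from karrio.schemas.usps.evsi_cancel_request import eVSICancelRequest
+#
+#     request = eVSICancelRequest(
+#         USERID="XXXXXXXX",
+#         PASSWORD="XXXXXXXX",
+#         BarcodeNumber="EA000000000US",
+#     )
+#     request.export(sys.stdout, 0, name_="eVSICancelRequest")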
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSICancelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, BarcodeNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSICancelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSICancelRequest.subclass: + return eVSICancelRequest.subclass(*args_, **kwargs_) + else: + return eVSICancelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.BarcodeNumber is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSICancelRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSICancelRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSICancelRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSICancelRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSICancelRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix +# end class eVSICancelRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
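+
+    Usage sketch (illustrative only; the credential and barcode values are
+    placeholders):
+        rootObj = parseString(
+            '<eVSICancelRequest USERID="XXXX" PASSWORD="XXXX">'
+            '<BarcodeNumber>EA000000000US</BarcodeNumber>'
+            '</eVSICancelRequest>')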
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evsi_cancel_request import *\n\n') + sys.stdout.write('import evsi_cancel_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSICancelRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/evsi_cancel_response.py b/modules/connectors/usps/karrio/schemas/usps/evsi_cancel_response.py new file mode 100644 index 0000000000..dc17cfe1c7 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/evsi_cancel_response.py @@ -0,0 +1,1345 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:49 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/evsi_cancel_response.py') +# +# Command line arguments: +# ./schemas/eVSICancelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/evsi_cancel_response.py" ./schemas/eVSICancelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSICancelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, BarcodeNumber=None, Status=None, Reason=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + self.Reason = Reason + self.Reason_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSICancelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSICancelResponse.subclass: + return eVSICancelResponse.subclass(*args_, **kwargs_) + else: + return eVSICancelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def get_Reason(self): + return self.Reason + def set_Reason(self, Reason): + self.Reason = Reason + def has__content(self): + if ( + self.BarcodeNumber is not None or + self.Status is not None or + self.Reason is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSICancelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSICancelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSICancelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSICancelResponse', pretty_print=pretty_print) + showIndent(outfile, level, 
pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSICancelResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.Status is not None: + namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatus>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_)) + if self.Reason is not None: + namespaceprefix_ = self.Reason_nsprefix_ + ':' if (UseCapturedNS_ and self.Reason_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReason>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Reason), input_name='Reason')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'Status': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Status') + value_ = self.gds_validate_string(value_, node, 'Status') + self.Status = value_ + self.Status_nsprefix_ = child_.prefix + elif nodeName_ == 'Reason': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Reason') + value_ = self.gds_validate_string(value_, node, 'Reason') + self.Reason = value_ + self.Reason_nsprefix_ = child_.prefix +# end class eVSICancelResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evsi_cancel_response import *\n\n') + sys.stdout.write('import evsi_cancel_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSICancelResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_request.py b/modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_request.py new file mode 100644 index 0000000000..d74fcc7f6e --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_request.py @@ -0,0 +1,1437 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:41 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/express_mail_commitment_request.py') +# +# Command line arguments: +# ./schemas/ExpressMailCommitmentRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/express_mail_commitment_request.py" ./schemas/ExpressMailCommitmentRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ExpressMailCommitmentRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, Date=None, DropOffTime=None, ReturnDates=None, PMGuarantee=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.DropOffTime = DropOffTime + self.DropOffTime_nsprefix_ = None + self.ReturnDates = ReturnDates + self.ReturnDates_nsprefix_ = None + self.PMGuarantee = PMGuarantee + self.PMGuarantee_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExpressMailCommitmentRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExpressMailCommitmentRequest.subclass: + return ExpressMailCommitmentRequest.subclass(*args_, **kwargs_) + else: + return ExpressMailCommitmentRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_DropOffTime(self): + return self.DropOffTime + def set_DropOffTime(self, DropOffTime): + self.DropOffTime = DropOffTime + def get_ReturnDates(self): + return self.ReturnDates + def set_ReturnDates(self, ReturnDates): + self.ReturnDates = ReturnDates + def get_PMGuarantee(self): + return self.PMGuarantee + def set_PMGuarantee(self, PMGuarantee): + self.PMGuarantee = PMGuarantee + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + 
self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Date is not None or + self.DropOffTime is not None or + self.ReturnDates is not None or + self.PMGuarantee is not None or + self.ClientType is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpressMailCommitmentRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpressMailCommitmentRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpressMailCommitmentRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpressMailCommitmentRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpressMailCommitmentRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.DropOffTime is not None: + namespaceprefix_ = self.DropOffTime_nsprefix_ + ':' if (UseCapturedNS_ and 
self.DropOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDropOffTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DropOffTime), input_name='DropOffTime')), namespaceprefix_ , eol_)) + if self.ReturnDates is not None: + namespaceprefix_ = self.ReturnDates_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnDates_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnDates>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReturnDates), input_name='ReturnDates')), namespaceprefix_ , eol_)) + if self.PMGuarantee is not None: + namespaceprefix_ = self.PMGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.PMGuarantee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPMGuarantee>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PMGuarantee), input_name='PMGuarantee')), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'DropOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DropOffTime') + value_ = self.gds_validate_string(value_, node, 'DropOffTime') + self.DropOffTime = value_ + self.DropOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnDates': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReturnDates') + value_ = self.gds_validate_string(value_, node, 'ReturnDates') + self.ReturnDates = value_ + self.ReturnDates_nsprefix_ = child_.prefix 
+ elif nodeName_ == 'PMGuarantee': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PMGuarantee') + value_ = self.gds_validate_string(value_, node, 'PMGuarantee') + self.PMGuarantee = value_ + self.PMGuarantee_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class ExpressMailCommitmentRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from express_mail_commitment_request import *\n\n') + sys.stdout.write('import express_mail_commitment_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
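+
+# Illustrative usage sketch (not produced by generateDS; the credentials, ZIP Codes and
+# date below are placeholder values): the ExpressMailCommitmentRequest class together
+# with the export()/parseString() helpers defined above supports a simple
+# build -> serialize -> re-parse round trip. This helper is only an example and is
+# never called by this module.
+def _example_round_trip():
+    import io
+    req = ExpressMailCommitmentRequest(
+        USERID='XXXXXXXX', PASSWORD='XXXXXXXX',   # placeholder credentials
+        OriginZip=20770, DestinationZip=11210,    # placeholder ZIP Codes
+        Date='2024-08-05',                        # placeholder date string
+    )
+    buf = io.StringIO()
+    req.export(buf, 0, name_='ExpressMailCommitmentRequest', pretty_print=True)
+    xml_payload = buf.getvalue()
+    # parseString() rebuilds the object tree; silence=True suppresses the stdout echo.
+    parsed = parseString(xml_payload, silence=True)
+    return parsed.get_OriginZip(), parsed.get_Date()
+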
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ExpressMailCommitmentRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_response.py b/modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_response.py new file mode 100644 index 0000000000..c5789e467b --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/express_mail_commitment_response.py @@ -0,0 +1,1846 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:41 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/express_mail_commitment_response.py') +# +# Command line arguments: +# ./schemas/ExpressMailCommitmentResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/express_mail_commitment_response.py" ./schemas/ExpressMailCommitmentResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
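+#
+
+# Illustrative sketch (not produced by generateDS; the overrides module passed in is a
+# hypothetical caller-supplied module): each generated factory() consults
+# CurrentSubclassModule_ through getSubclassFromModule_, which looks for a class named
+# "<ClassName>Sub" (e.g. ExpressMailCommitmentResponseSub) and, when found, builds that
+# subclass instead of the generated one during parsing. This helper only shows the
+# wiring and is never called by this module.
+def _example_enable_subclass_overrides(overrides_module):
+    global CurrentSubclassModule_
+    CurrentSubclassModule_ = overrides_module
+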
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ExpressMailCommitmentResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZIP=None, OriginCity=None, OriginState=None, DestinationZIP=None, DestinationCity=None, DestinationState=None, Date=None, Time=None, EffectiveAcceptanceDate=None, Commitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZIP = OriginZIP + self.OriginZIP_nsprefix_ = None + self.OriginCity = OriginCity + self.OriginCity_nsprefix_ = None + self.OriginState = OriginState + self.OriginState_nsprefix_ = None + self.DestinationZIP = DestinationZIP + self.DestinationZIP_nsprefix_ = None + self.DestinationCity = DestinationCity + self.DestinationCity_nsprefix_ = None + self.DestinationState = DestinationState + self.DestinationState_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.Time = Time + self.Time_nsprefix_ = None + if isinstance(EffectiveAcceptanceDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EffectiveAcceptanceDate, '%Y-%m-%d').date() + else: + initvalue_ = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate = initvalue_ + self.EffectiveAcceptanceDate_nsprefix_ = None + if Commitment is None: + self.Commitment = [] + else: + self.Commitment = Commitment + self.Commitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExpressMailCommitmentResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExpressMailCommitmentResponse.subclass: + return ExpressMailCommitmentResponse.subclass(*args_, **kwargs_) + else: + return ExpressMailCommitmentResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZIP(self): + return self.OriginZIP + def set_OriginZIP(self, OriginZIP): + self.OriginZIP = OriginZIP + def get_OriginCity(self): + return self.OriginCity + def set_OriginCity(self, OriginCity): + self.OriginCity = OriginCity + def get_OriginState(self): + return self.OriginState + def set_OriginState(self, OriginState): + self.OriginState = OriginState + def get_DestinationZIP(self): + return self.DestinationZIP + def set_DestinationZIP(self, DestinationZIP): + self.DestinationZIP = DestinationZIP + def get_DestinationCity(self): + return 
self.DestinationCity + def set_DestinationCity(self, DestinationCity): + self.DestinationCity = DestinationCity + def get_DestinationState(self): + return self.DestinationState + def set_DestinationState(self, DestinationState): + self.DestinationState = DestinationState + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_Time(self): + return self.Time + def set_Time(self, Time): + self.Time = Time + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_Commitment(self): + return self.Commitment + def set_Commitment(self, Commitment): + self.Commitment = Commitment + def add_Commitment(self, value): + self.Commitment.append(value) + def insert_Commitment_at(self, index, value): + self.Commitment.insert(index, value) + def replace_Commitment_at(self, index, value): + self.Commitment[index] = value + def has__content(self): + if ( + self.OriginZIP is not None or + self.OriginCity is not None or + self.OriginState is not None or + self.DestinationZIP is not None or + self.DestinationCity is not None or + self.DestinationState is not None or + self.Date is not None or + self.Time is not None or + self.EffectiveAcceptanceDate is not None or + self.Commitment + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpressMailCommitmentResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpressMailCommitmentResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpressMailCommitmentResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpressMailCommitmentResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpressMailCommitmentResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZIP is not None: + namespaceprefix_ = self.OriginZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZIP, input_name='OriginZIP'), namespaceprefix_ , eol_)) + if self.OriginCity is not None: + namespaceprefix_ = self.OriginCity_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCity>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.OriginCity), input_name='OriginCity')), namespaceprefix_ , eol_)) + if self.OriginState is not None: + namespaceprefix_ = self.OriginState_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginState), input_name='OriginState')), namespaceprefix_ , eol_)) + if self.DestinationZIP is not None: + namespaceprefix_ = self.DestinationZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZIP, input_name='DestinationZIP'), namespaceprefix_ , eol_)) + if self.DestinationCity is not None: + namespaceprefix_ = self.DestinationCity_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationCity), input_name='DestinationCity')), namespaceprefix_ , eol_)) + if self.DestinationState is not None: + namespaceprefix_ = self.DestinationState_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationState), input_name='DestinationState')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.Time is not None: + namespaceprefix_ = self.Time_nsprefix_ + ':' if (UseCapturedNS_ and self.Time_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Time), input_name='Time')), namespaceprefix_ , eol_)) + if self.EffectiveAcceptanceDate is not None: + namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEffectiveAcceptanceDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.EffectiveAcceptanceDate, input_name='EffectiveAcceptanceDate'), namespaceprefix_ , eol_)) + for Commitment_ in self.Commitment: + namespaceprefix_ = self.Commitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Commitment_nsprefix_) else '' + Commitment_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Commitment', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, 
nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZIP') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZIP') + self.OriginZIP = ival_ + self.OriginZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCity') + value_ = self.gds_validate_string(value_, node, 'OriginCity') + self.OriginCity = value_ + self.OriginCity_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginState') + value_ = self.gds_validate_string(value_, node, 'OriginState') + self.OriginState = value_ + self.OriginState_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZIP') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZIP') + self.DestinationZIP = ival_ + self.DestinationZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationCity') + value_ = self.gds_validate_string(value_, node, 'DestinationCity') + self.DestinationCity = value_ + self.DestinationCity_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationState') + value_ = self.gds_validate_string(value_, node, 'DestinationState') + self.DestinationState = value_ + self.DestinationState_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'Time': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Time') + value_ = self.gds_validate_string(value_, node, 'Time') + self.Time = value_ + self.Time_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.EffectiveAcceptanceDate = dval_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'Commitment': + obj_ = CommitmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Commitment.append(obj_) + obj_.original_tagname_ = 'Commitment' +# end class ExpressMailCommitmentResponse + + +class CommitmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CommitmentName=None, CommitmentTime=None, CommitmentSequence=None, Location=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.CommitmentTime = CommitmentTime + self.CommitmentTime_nsprefix_ = None + self.CommitmentSequence = CommitmentSequence + self.CommitmentSequence_nsprefix_ = None + if Location is None: + self.Location = [] + else: + self.Location = Location + self.Location_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommitmentType) + if subclass is not None: + 
return subclass(*args_, **kwargs_) + if CommitmentType.subclass: + return CommitmentType.subclass(*args_, **kwargs_) + else: + return CommitmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName = CommitmentName + def get_CommitmentTime(self): + return self.CommitmentTime + def set_CommitmentTime(self, CommitmentTime): + self.CommitmentTime = CommitmentTime + def get_CommitmentSequence(self): + return self.CommitmentSequence + def set_CommitmentSequence(self, CommitmentSequence): + self.CommitmentSequence = CommitmentSequence + def get_Location(self): + return self.Location + def set_Location(self, Location): + self.Location = Location + def add_Location(self, value): + self.Location.append(value) + def insert_Location_at(self, index, value): + self.Location.insert(index, value) + def replace_Location_at(self, index, value): + self.Location[index] = value + def has__content(self): + if ( + self.CommitmentName is not None or + self.CommitmentTime is not None or + self.CommitmentSequence is not None or + self.Location + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommitmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CommitmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommitmentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommitmentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommitmentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CommitmentName is not None: + namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_)) + if self.CommitmentTime is not None: + namespaceprefix_ = self.CommitmentTime_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentTime), input_name='CommitmentTime')), namespaceprefix_ , eol_)) + if 
self.CommitmentSequence is not None: + namespaceprefix_ = self.CommitmentSequence_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentSequence_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentSequence>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentSequence), input_name='CommitmentSequence')), namespaceprefix_ , eol_)) + for Location_ in self.Location: + namespaceprefix_ = self.Location_nsprefix_ + ':' if (UseCapturedNS_ and self.Location_nsprefix_) else '' + Location_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Location', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'CommitmentName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentName') + value_ = self.gds_validate_string(value_, node, 'CommitmentName') + self.CommitmentName = value_ + self.CommitmentName_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentTime') + value_ = self.gds_validate_string(value_, node, 'CommitmentTime') + self.CommitmentTime = value_ + self.CommitmentTime_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentSequence': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentSequence') + value_ = self.gds_validate_string(value_, node, 'CommitmentSequence') + self.CommitmentSequence = value_ + self.CommitmentSequence_nsprefix_ = child_.prefix + elif nodeName_ == 'Location': + obj_ = LocationType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Location.append(obj_) + obj_.original_tagname_ = 'Location' +# end class CommitmentType + + +class LocationType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ScheduledDeliveryDate=None, CutOff=None, Facility=None, Street=None, City=None, State=None, Zip=None, IsGuaranteed=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(ScheduledDeliveryDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(ScheduledDeliveryDate, '%Y-%m-%d').date() + else: + initvalue_ = ScheduledDeliveryDate + self.ScheduledDeliveryDate = initvalue_ + self.ScheduledDeliveryDate_nsprefix_ = None + self.CutOff = CutOff + self.CutOff_nsprefix_ = None + self.Facility = Facility + self.Facility_nsprefix_ = None + self.Street = Street + self.Street_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Zip = Zip + self.Zip_nsprefix_ = None + self.IsGuaranteed = IsGuaranteed + self.IsGuaranteed_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + 
subclass = getSubclassFromModule_( + CurrentSubclassModule_, LocationType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LocationType.subclass: + return LocationType.subclass(*args_, **kwargs_) + else: + return LocationType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def get_CutOff(self): + return self.CutOff + def set_CutOff(self, CutOff): + self.CutOff = CutOff + def get_Facility(self): + return self.Facility + def set_Facility(self, Facility): + self.Facility = Facility + def get_Street(self): + return self.Street + def set_Street(self, Street): + self.Street = Street + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Zip(self): + return self.Zip + def set_Zip(self, Zip): + self.Zip = Zip + def get_IsGuaranteed(self): + return self.IsGuaranteed + def set_IsGuaranteed(self, IsGuaranteed): + self.IsGuaranteed = IsGuaranteed + def has__content(self): + if ( + self.ScheduledDeliveryDate is not None or + self.CutOff is not None or + self.Facility is not None or + self.Street is not None or + self.City is not None or + self.State is not None or + self.Zip is not None or + self.IsGuaranteed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'LocationType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ScheduledDeliveryDate is not None: + namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sScheduledDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.ScheduledDeliveryDate, input_name='ScheduledDeliveryDate'), namespaceprefix_ , eol_)) + if self.CutOff is not None: + namespaceprefix_ = self.CutOff_nsprefix_ + ':' if (UseCapturedNS_ 
and self.CutOff_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCutOff>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CutOff), input_name='CutOff')), namespaceprefix_ , eol_)) + if self.Facility is not None: + namespaceprefix_ = self.Facility_nsprefix_ + ':' if (UseCapturedNS_ and self.Facility_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacility>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Facility), input_name='Facility')), namespaceprefix_ , eol_)) + if self.Street is not None: + namespaceprefix_ = self.Street_nsprefix_ + ':' if (UseCapturedNS_ and self.Street_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStreet>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Street), input_name='Street')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Zip is not None: + namespaceprefix_ = self.Zip_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip, input_name='Zip'), namespaceprefix_ , eol_)) + if self.IsGuaranteed is not None: + namespaceprefix_ = self.IsGuaranteed_nsprefix_ + ':' if (UseCapturedNS_ and self.IsGuaranteed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sIsGuaranteed>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.IsGuaranteed, input_name='IsGuaranteed'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ScheduledDeliveryDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.ScheduledDeliveryDate = dval_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'CutOff': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CutOff') + value_ = self.gds_validate_string(value_, node, 'CutOff') + self.CutOff = value_ + self.CutOff_nsprefix_ = child_.prefix + elif nodeName_ == 'Facility': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Facility') + value_ = self.gds_validate_string(value_, node, 'Facility') + self.Facility = value_ + self.Facility_nsprefix_ = child_.prefix + elif nodeName_ == 
'Street':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Street')
+            value_ = self.gds_validate_string(value_, node, 'Street')
+            self.Street = value_
+            self.Street_nsprefix_ = child_.prefix
+        elif nodeName_ == 'City':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'City')
+            value_ = self.gds_validate_string(value_, node, 'City')
+            self.City = value_
+            self.City_nsprefix_ = child_.prefix
+        elif nodeName_ == 'State':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'State')
+            value_ = self.gds_validate_string(value_, node, 'State')
+            self.State = value_
+            self.State_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Zip' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'Zip')
+            ival_ = self.gds_validate_integer(ival_, node, 'Zip')
+            self.Zip = ival_
+            self.Zip_nsprefix_ = child_.prefix
+        elif nodeName_ == 'IsGuaranteed' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'IsGuaranteed')
+            ival_ = self.gds_validate_integer(ival_, node, 'IsGuaranteed')
+            self.IsGuaranteed = ival_
+            self.IsGuaranteed_nsprefix_ = child_.prefix
+# end class LocationType
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from express_mail_commitment_response import *\n\n') + sys.stdout.write('import express_mail_commitment_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CommitmentType", + "ExpressMailCommitmentResponse", + "LocationType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/first_class_mail_request.py b/modules/connectors/usps/karrio/schemas/usps/first_class_mail_request.py new file mode 100644 index 0000000000..4da6d672db --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/first_class_mail_request.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:41 2024 by generateDS.py version 2.43.3. 
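For orientation, a minimal usage sketch of the generated request class defined further down in this file. It assumes the package layout in this patch makes the module importable as karrio.schemas.usps.first_class_mail_request; the credential and ZIP values below are placeholders, not real data.

    import sys
    from karrio.schemas.usps import first_class_mail_request as first_class

    # Build a request with the generated constructor and setters
    # (USERID/PASSWORD become XML attributes, the rest child elements).
    request = first_class.FirstClassMailRequest(
        USERID="XXXXXXXXXXXX",
        PASSWORD="XXXXXXXXXXXX",
    )
    request.set_OriginZip(44106)
    request.set_DestinationZip(20770)
    request.set_DestinationType(1)

    # export() serializes the object tree back to XML on the given stream.
    request.export(sys.stdout, 0, name_="FirstClassMailRequest")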
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/first_class_mail_request.py') +# +# Command line arguments: +# ./schemas/FirstClassMailRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/first_class_mail_request.py" ./schemas/FirstClassMailRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
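For a concrete sense of what the escaping helpers defined below produce, a small sketch follows (the import path is assumed from this patch's layout; expected outputs are shown as comments and are not part of the generated module):

    from karrio.schemas.usps.first_class_mail_request import quote_xml, quote_attrib

    # Element text is escaped via quote_xml / quote_xml_aux.
    print(quote_xml('Fragile & <handle with care>'))
    # Fragile &amp; &lt;handle with care&gt;

    # Attribute values go through quote_attrib, which also picks the quoting style.
    print(quote_attrib('5" x 7"'))
    # '5" x 7"'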
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class FirstClassMailRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, DestinationType=None, PMGuarantee=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.DestinationType = DestinationType + self.DestinationType_nsprefix_ = None + self.PMGuarantee = PMGuarantee + self.PMGuarantee_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, FirstClassMailRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FirstClassMailRequest.subclass: + return FirstClassMailRequest.subclass(*args_, **kwargs_) + else: + return FirstClassMailRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_DestinationType(self): + return self.DestinationType + def set_DestinationType(self, DestinationType): + self.DestinationType = DestinationType + def get_PMGuarantee(self): + return self.PMGuarantee + def set_PMGuarantee(self, PMGuarantee): + self.PMGuarantee = PMGuarantee + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.DestinationType is not None or + self.PMGuarantee is not None or + self.ClientType is not None + ): + return True + else: + 
            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailRequest', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('FirstClassMailRequest')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'FirstClassMailRequest':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FirstClassMailRequest')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FirstClassMailRequest', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FirstClassMailRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.OriginZip is not None:
+            namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sOriginZip>%s</%sOriginZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_))
+        if self.DestinationZip is not None:
+            namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDestinationZip>%s</%sDestinationZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_))
+        if self.DestinationType is not None:
+            namespaceprefix_ = self.DestinationType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDestinationType>%s</%sDestinationType>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationType, input_name='DestinationType'), namespaceprefix_ , eol_))
+        if self.PMGuarantee is not None:
+            namespaceprefix_ = self.PMGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.PMGuarantee_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPMGuarantee>%s</%sPMGuarantee>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PMGuarantee), input_name='PMGuarantee')), namespaceprefix_ , eol_))
+        if self.ClientType is not None:
+            namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else ''
+            showIndent(outfile, level,
pretty_print) + outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationType') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationType') + self.DestinationType = ival_ + self.DestinationType_nsprefix_ = child_.prefix + elif nodeName_ == 'PMGuarantee': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PMGuarantee') + value_ = self.gds_validate_string(value_, node, 'PMGuarantee') + self.PMGuarantee = value_ + self.PMGuarantee_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class FirstClassMailRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from first_class_mail_request import *\n\n') + sys.stdout.write('import first_class_mail_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "FirstClassMailRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/first_class_mail_response.py b/modules/connectors/usps/karrio/schemas/usps/first_class_mail_response.py new file mode 100644 index 0000000000..12791d78c9 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/first_class_mail_response.py @@ -0,0 +1,1396 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:41 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/first_class_mail_response.py') +# +# Command line arguments: +# ./schemas/FirstClassMailResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/first_class_mail_response.py" ./schemas/FirstClassMailResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class FirstClassMailResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZip=None, DestinationZip=None, Days=None, Message=None, EffectiveAcceptanceDate=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Days = Days + self.Days_nsprefix_ = None + self.Message = Message + self.Message_nsprefix_ = None + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, FirstClassMailResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FirstClassMailResponse.subclass: + return FirstClassMailResponse.subclass(*args_, **kwargs_) + else: + return FirstClassMailResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Days(self): + return self.Days + def set_Days(self, Days): + self.Days = Days + def get_Message(self): + return self.Message + def set_Message(self, Message): + self.Message = Message + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Days is not None or + self.Message is not None or + self.EffectiveAcceptanceDate is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailResponse', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('FirstClassMailResponse')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'FirstClassMailResponse':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FirstClassMailResponse')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FirstClassMailResponse', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FirstClassMailResponse'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailResponse', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.OriginZip is not None:
+            namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sOriginZip>%s</%sOriginZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_))
+        if self.DestinationZip is not None:
+            namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDestinationZip>%s</%sDestinationZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_))
+        if self.Days is not None:
+            namespaceprefix_ = self.Days_nsprefix_ + ':' if (UseCapturedNS_ and self.Days_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDays>%s</%sDays>%s' % (namespaceprefix_ , self.gds_format_integer(self.Days, input_name='Days'), namespaceprefix_ , eol_))
+        if self.Message is not None:
+            namespaceprefix_ = self.Message_nsprefix_ + ':' if (UseCapturedNS_ and self.Message_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMessage>%s</%sMessage>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Message), input_name='Message')), namespaceprefix_ , eol_))
+        if self.EffectiveAcceptanceDate is not None:
+            namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEffectiveAcceptanceDate>%s</%sEffectiveAcceptanceDate>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EffectiveAcceptanceDate), input_name='EffectiveAcceptanceDate')), namespaceprefix_ , eol_))
+        if self.ScheduledDeliveryDate is not None:
+            namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sScheduledDeliveryDate>%s</%sScheduledDeliveryDate>%s' % (namespaceprefix_ ,
self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Days' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Days') + ival_ = self.gds_validate_integer(ival_, node, 'Days') + self.Days = ival_ + self.Days_nsprefix_ = child_.prefix + elif nodeName_ == 'Message': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Message') + value_ = self.gds_validate_string(value_, node, 'Message') + self.Message = value_ + self.Message_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EffectiveAcceptanceDate') + value_ = self.gds_validate_string(value_, node, 'EffectiveAcceptanceDate') + self.EffectiveAcceptanceDate = value_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ScheduledDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate') + self.ScheduledDeliveryDate = value_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix +# end class FirstClassMailResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from first_class_mail_response import *\n\n') + sys.stdout.write('import first_class_mail_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "FirstClassMailResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_request.py b/modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_request.py new file mode 100644 index 0000000000..8696034bbc --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_request.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:42 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/hfp_facility_info_request.py') +# +# Command line arguments: +# ./schemas/HFPFacilityInfoRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/hfp_facility_info_request.py" ./schemas/HFPFacilityInfoRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class HFPFacilityInfoRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, PickupCity=None, PickupState=None, PickupZIP=None, PickupZIP4=None, Service=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.PickupCity = PickupCity + self.PickupCity_nsprefix_ = None + self.PickupState = PickupState + self.PickupState_nsprefix_ = None + self.PickupZIP = PickupZIP + self.PickupZIP_nsprefix_ = None + self.PickupZIP4 = PickupZIP4 + self.PickupZIP4_nsprefix_ = None + self.Service = Service + self.Service_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, HFPFacilityInfoRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if HFPFacilityInfoRequest.subclass: + return HFPFacilityInfoRequest.subclass(*args_, **kwargs_) + else: + return HFPFacilityInfoRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PickupCity(self): + return self.PickupCity + def set_PickupCity(self, PickupCity): + self.PickupCity = PickupCity + def get_PickupState(self): + return self.PickupState + def set_PickupState(self, PickupState): + self.PickupState = PickupState + def get_PickupZIP(self): + return self.PickupZIP + def set_PickupZIP(self, PickupZIP): + self.PickupZIP = PickupZIP + def get_PickupZIP4(self): + return self.PickupZIP4 + def set_PickupZIP4(self, PickupZIP4): + self.PickupZIP4 = PickupZIP4 + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.PickupCity is not None or + self.PickupState is not None or + self.PickupZIP is not None or + self.PickupZIP4 is not None or + self.Service is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoRequest', 
pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('HFPFacilityInfoRequest')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'HFPFacilityInfoRequest':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HFPFacilityInfoRequest')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HFPFacilityInfoRequest', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HFPFacilityInfoRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.PickupCity is not None:
+            namespaceprefix_ = self.PickupCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupCity_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPickupCity>%s</%sPickupCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupCity), input_name='PickupCity')), namespaceprefix_ , eol_))
+        if self.PickupState is not None:
+            namespaceprefix_ = self.PickupState_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupState_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPickupState>%s</%sPickupState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupState), input_name='PickupState')), namespaceprefix_ , eol_))
+        if self.PickupZIP is not None:
+            namespaceprefix_ = self.PickupZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPickupZIP>%s</%sPickupZIP>%s' % (namespaceprefix_ , self.gds_format_integer(self.PickupZIP, input_name='PickupZIP'), namespaceprefix_ , eol_))
+        if self.PickupZIP4 is not None:
+            namespaceprefix_ = self.PickupZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP4_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPickupZIP4>%s</%sPickupZIP4>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupZIP4), input_name='PickupZIP4')), namespaceprefix_ , eol_))
+        if self.Service is not None:
+            namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sService>%s</%sService>%s' % (namespaceprefix_ ,
self.gds_encode(self.gds_format_string(quote_xml(self.Service), input_name='Service')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PickupCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupCity') + value_ = self.gds_validate_string(value_, node, 'PickupCity') + self.PickupCity = value_ + self.PickupCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupState') + value_ = self.gds_validate_string(value_, node, 'PickupState') + self.PickupState = value_ + self.PickupState_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PickupZIP') + ival_ = self.gds_validate_integer(ival_, node, 'PickupZIP') + self.PickupZIP = ival_ + self.PickupZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupZIP4') + value_ = self.gds_validate_string(value_, node, 'PickupZIP4') + self.PickupZIP4 = value_ + self.PickupZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Service': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Service') + value_ = self.gds_validate_string(value_, node, 'Service') + self.Service = value_ + self.Service_nsprefix_ = child_.prefix +# end class HFPFacilityInfoRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from hfp_facility_info_request import *\n\n') + sys.stdout.write('import hfp_facility_info_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "HFPFacilityInfoRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_response.py b/modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_response.py new file mode 100644 index 0000000000..0412327ad1 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/hfp_facility_info_response.py @@ -0,0 +1,1601 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:42 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/hfp_facility_info_response.py') +# +# Command line arguments: +# ./schemas/HFPFacilityInfoResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/hfp_facility_info_response.py" ./schemas/HFPFacilityInfoResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class HFPFacilityInfoResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PickupCity=None, PickupState=None, PickupZIP=None, PickupZIP4=None, Facility=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.PickupCity = PickupCity + self.PickupCity_nsprefix_ = None + self.PickupState = PickupState + self.PickupState_nsprefix_ = None + self.PickupZIP = PickupZIP + self.PickupZIP_nsprefix_ = None + self.PickupZIP4 = PickupZIP4 + self.PickupZIP4_nsprefix_ = None + if Facility is None: + self.Facility = [] + else: + self.Facility = Facility + self.Facility_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, HFPFacilityInfoResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if HFPFacilityInfoResponse.subclass: + return HFPFacilityInfoResponse.subclass(*args_, **kwargs_) + else: + return HFPFacilityInfoResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PickupCity(self): + return self.PickupCity + def set_PickupCity(self, PickupCity): + self.PickupCity = PickupCity + def get_PickupState(self): + return self.PickupState + def set_PickupState(self, PickupState): + self.PickupState = PickupState + def get_PickupZIP(self): + return self.PickupZIP + def set_PickupZIP(self, PickupZIP): + self.PickupZIP = PickupZIP + def get_PickupZIP4(self): + return self.PickupZIP4 + def set_PickupZIP4(self, PickupZIP4): + self.PickupZIP4 = PickupZIP4 + def get_Facility(self): + return self.Facility + def set_Facility(self, Facility): + self.Facility = Facility + def add_Facility(self, value): + self.Facility.append(value) + def insert_Facility_at(self, index, value): + self.Facility.insert(index, value) + def replace_Facility_at(self, index, value): + self.Facility[index] = value + def has__content(self): + if ( + self.PickupCity is not None or + self.PickupState is not None or + self.PickupZIP is not None or + self.PickupZIP4 is not None or + self.Facility + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('HFPFacilityInfoResponse') + if 
imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'HFPFacilityInfoResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HFPFacilityInfoResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HFPFacilityInfoResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HFPFacilityInfoResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PickupCity is not None: + namespaceprefix_ = self.PickupCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupCity>%s</%sPickupCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupCity), input_name='PickupCity')), namespaceprefix_ , eol_)) + if self.PickupState is not None: + namespaceprefix_ = self.PickupState_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupState>%s</%sPickupState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupState), input_name='PickupState')), namespaceprefix_ , eol_)) + if self.PickupZIP is not None: + namespaceprefix_ = self.PickupZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupZIP>%s</%sPickupZIP>%s' % (namespaceprefix_ , self.gds_format_integer(self.PickupZIP, input_name='PickupZIP'), namespaceprefix_ , eol_)) + if self.PickupZIP4 is not None: + namespaceprefix_ = self.PickupZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupZIP4>%s</%sPickupZIP4>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupZIP4), input_name='PickupZIP4')), namespaceprefix_ , eol_)) + for Facility_ in self.Facility: + namespaceprefix_ = self.Facility_nsprefix_ + ':' if (UseCapturedNS_ and self.Facility_nsprefix_) else '' + Facility_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Facility', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, 
gds_collector_=None): + if nodeName_ == 'PickupCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupCity') + value_ = self.gds_validate_string(value_, node, 'PickupCity') + self.PickupCity = value_ + self.PickupCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupState') + value_ = self.gds_validate_string(value_, node, 'PickupState') + self.PickupState = value_ + self.PickupState_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PickupZIP') + ival_ = self.gds_validate_integer(ival_, node, 'PickupZIP') + self.PickupZIP = ival_ + self.PickupZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupZIP4') + value_ = self.gds_validate_string(value_, node, 'PickupZIP4') + self.PickupZIP4 = value_ + self.PickupZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Facility': + obj_ = FacilityType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Facility.append(obj_) + obj_.original_tagname_ = 'Facility' +# end class HFPFacilityInfoResponse + + +class FacilityType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FacilityID=None, FacilityName=None, FacilityAddress=None, FacilityCity=None, FacilityState=None, FacilityZIP=None, FacilityZIP4=None, Has10amCommitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FacilityID = FacilityID + self.FacilityID_nsprefix_ = None + self.FacilityName = FacilityName + self.FacilityName_nsprefix_ = None + self.FacilityAddress = FacilityAddress + self.FacilityAddress_nsprefix_ = None + self.FacilityCity = FacilityCity + self.FacilityCity_nsprefix_ = None + self.FacilityState = FacilityState + self.FacilityState_nsprefix_ = None + self.FacilityZIP = FacilityZIP + self.FacilityZIP_nsprefix_ = None + self.FacilityZIP4 = FacilityZIP4 + self.FacilityZIP4_nsprefix_ = None + self.Has10amCommitment = Has10amCommitment + self.Has10amCommitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, FacilityType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FacilityType.subclass: + return FacilityType.subclass(*args_, **kwargs_) + else: + return FacilityType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FacilityID(self): + return self.FacilityID + def set_FacilityID(self, FacilityID): + self.FacilityID = FacilityID + def get_FacilityName(self): + return self.FacilityName + def set_FacilityName(self, FacilityName): + self.FacilityName = FacilityName + def get_FacilityAddress(self): + return self.FacilityAddress + def set_FacilityAddress(self, FacilityAddress): + self.FacilityAddress = FacilityAddress + def get_FacilityCity(self): + return self.FacilityCity + def set_FacilityCity(self, FacilityCity): + self.FacilityCity = FacilityCity + def get_FacilityState(self): + return self.FacilityState + def set_FacilityState(self, FacilityState): + 
self.FacilityState = FacilityState + def get_FacilityZIP(self): + return self.FacilityZIP + def set_FacilityZIP(self, FacilityZIP): + self.FacilityZIP = FacilityZIP + def get_FacilityZIP4(self): + return self.FacilityZIP4 + def set_FacilityZIP4(self, FacilityZIP4): + self.FacilityZIP4 = FacilityZIP4 + def get_Has10amCommitment(self): + return self.Has10amCommitment + def set_Has10amCommitment(self, Has10amCommitment): + self.Has10amCommitment = Has10amCommitment + def has__content(self): + if ( + self.FacilityID is not None or + self.FacilityName is not None or + self.FacilityAddress is not None or + self.FacilityCity is not None or + self.FacilityState is not None or + self.FacilityZIP is not None or + self.FacilityZIP4 is not None or + self.Has10amCommitment is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FacilityType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('FacilityType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'FacilityType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FacilityType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FacilityType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FacilityType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FacilityType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FacilityID is not None: + namespaceprefix_ = self.FacilityID_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityID>%s</%sFacilityID>%s' % (namespaceprefix_ , self.gds_format_integer(self.FacilityID, input_name='FacilityID'), namespaceprefix_ , eol_)) + if self.FacilityName is not None: + namespaceprefix_ = self.FacilityName_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityName>%s</%sFacilityName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityName), input_name='FacilityName')), namespaceprefix_ , eol_)) + if self.FacilityAddress is not None: + namespaceprefix_ = self.FacilityAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityAddress>%s</%sFacilityAddress>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityAddress), input_name='FacilityAddress')), namespaceprefix_ , eol_)) + if self.FacilityCity is not None: + namespaceprefix_ = self.FacilityCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sFacilityCity>%s</%sFacilityCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityCity), input_name='FacilityCity')), namespaceprefix_ , eol_)) + if self.FacilityState is not None: + namespaceprefix_ = self.FacilityState_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityState>%s</%sFacilityState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityState), input_name='FacilityState')), namespaceprefix_ , eol_)) + if self.FacilityZIP is not None: + namespaceprefix_ = self.FacilityZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityZIP>%s</%sFacilityZIP>%s' % (namespaceprefix_ , self.gds_format_integer(self.FacilityZIP, input_name='FacilityZIP'), namespaceprefix_ , eol_)) + if self.FacilityZIP4 is not None: + namespaceprefix_ = self.FacilityZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityZIP4>%s</%sFacilityZIP4>%s' % (namespaceprefix_ , self.gds_format_integer(self.FacilityZIP4, input_name='FacilityZIP4'), namespaceprefix_ , eol_)) + if self.Has10amCommitment is not None: + namespaceprefix_ = self.Has10amCommitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Has10amCommitment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHas10amCommitment>%s</%sHas10amCommitment>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Has10amCommitment), input_name='Has10amCommitment')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FacilityID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FacilityID') + ival_ = self.gds_validate_integer(ival_, node, 'FacilityID') + self.FacilityID = ival_ + self.FacilityID_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityName') + value_ = self.gds_validate_string(value_, node, 'FacilityName') + self.FacilityName = value_ + self.FacilityName_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityAddress') + value_ = self.gds_validate_string(value_, node, 'FacilityAddress') + self.FacilityAddress = value_ + self.FacilityAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityCity') + value_ = self.gds_validate_string(value_, node, 'FacilityCity') + self.FacilityCity = value_ + self.FacilityCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityState') + value_ = self.gds_validate_string(value_, node, 'FacilityState') + 
self.FacilityState = value_ + self.FacilityState_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FacilityZIP') + ival_ = self.gds_validate_integer(ival_, node, 'FacilityZIP') + self.FacilityZIP = ival_ + self.FacilityZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityZIP4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FacilityZIP4') + ival_ = self.gds_validate_integer(ival_, node, 'FacilityZIP4') + self.FacilityZIP4 = ival_ + self.FacilityZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Has10amCommitment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Has10amCommitment') + value_ = self.gds_validate_string(value_, node, 'Has10amCommitment') + self.Has10amCommitment = value_ + self.Has10amCommitment_nsprefix_ = child_.prefix +# end class FacilityType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python <Parser>.py [ -s ] <in_xml_file> +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('<?xml version="1.0" ?>\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + 
reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('<?xml version="1.0" ?>\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from hfp_facility_info_response import *\n\n') + sys.stdout.write('import hfp_facility_info_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "FacilityType", + "HFPFacilityInfoResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_request.py b/modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_request.py new file mode 100644 index 0000000000..15ddfa8de3 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_request.py @@ -0,0 +1,2150 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:42 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/intl_rate_v2_request.py') +# +# Command line arguments: +# ./schemas/IntlRateV2Request.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/intl_rate_v2_request.py" ./schemas/IntlRateV2Request.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class IntlRateV2Request(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Revision=None, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, IntlRateV2Request) + if subclass is not None: + return subclass(*args_, **kwargs_) + if IntlRateV2Request.subclass: + return IntlRateV2Request.subclass(*args_, **kwargs_) + else: + return IntlRateV2Request(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Revision is not None or + self.Package + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Request', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntlRateV2Request') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'IntlRateV2Request': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + 
namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntlRateV2Request') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntlRateV2Request', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntlRateV2Request'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Request', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' +# end class IntlRateV2Request + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Pounds=None, Ounces=None, Machinable=True, MailType=None, GXG=None, ValueOfContents=None, Country=None, Container=None, Size=None, 
Width=None, Length=None, Height=None, Girth=None, OriginZip=None, CommercialFlag=None, CommercialPlusFlag=None, ExtraServices=None, AcceptanceDateTime=None, DestinationPostalCode=None, Content=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Pounds = Pounds + self.validate_PoundsType(self.Pounds) + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.validate_OuncesType(self.Ounces) + self.Ounces_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.MailType = MailType + self.MailType_nsprefix_ = None + self.GXG = GXG + self.GXG_nsprefix_ = None + self.ValueOfContents = ValueOfContents + self.ValueOfContents_nsprefix_ = None + self.Country = Country + self.Country_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Size = Size + self.Size_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.CommercialFlag = CommercialFlag + self.CommercialFlag_nsprefix_ = None + self.CommercialPlusFlag = CommercialPlusFlag + self.CommercialPlusFlag_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.AcceptanceDateTime = AcceptanceDateTime + self.AcceptanceDateTime_nsprefix_ = None + self.DestinationPostalCode = DestinationPostalCode + self.DestinationPostalCode_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, Ounces): + self.Ounces = Ounces + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_MailType(self): + return self.MailType + def set_MailType(self, MailType): + self.MailType = MailType + def get_GXG(self): + return self.GXG + def set_GXG(self, GXG): + self.GXG = GXG + def get_ValueOfContents(self): + return self.ValueOfContents + def set_ValueOfContents(self, ValueOfContents): + self.ValueOfContents = ValueOfContents + def get_Country(self): + return self.Country + def set_Country(self, Country): + self.Country = Country + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Size(self): + return self.Size + def set_Size(self, Size): + self.Size = Size + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return 
self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_CommercialFlag(self): + return self.CommercialFlag + def set_CommercialFlag(self, CommercialFlag): + self.CommercialFlag = CommercialFlag + def get_CommercialPlusFlag(self): + return self.CommercialPlusFlag + def set_CommercialPlusFlag(self, CommercialPlusFlag): + self.CommercialPlusFlag = CommercialPlusFlag + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_AcceptanceDateTime(self): + return self.AcceptanceDateTime + def set_AcceptanceDateTime(self, AcceptanceDateTime): + self.AcceptanceDateTime = AcceptanceDateTime + def get_DestinationPostalCode(self): + return self.DestinationPostalCode + def set_DestinationPostalCode(self, DestinationPostalCode): + self.DestinationPostalCode = DestinationPostalCode + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def validate_PoundsType(self, value): + result = True + # Validate type PoundsType, a restriction on xs:integer. + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False + if value < 0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + if value > 70: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + return result + def validate_OuncesType(self, value): + result = True + # Validate type OuncesType, a restriction on xs:decimal. 
+ if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, decimal_.Decimal): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (decimal_.Decimal)' % {"value": value, "lineno": lineno, }) + return False + if value < 0.0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on OuncesType' % {"value": value, "lineno": lineno} ) + result = False + return result + def has__content(self): + if ( + self.Pounds is not None or + self.Ounces is not None or + not self.Machinable or + self.MailType is not None or + self.GXG is not None or + self.ValueOfContents is not None or + self.Country is not None or + self.Container is not None or + self.Size is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.OriginZip is not None or + self.CommercialFlag is not None or + self.CommercialPlusFlag is not None or + self.ExtraServices is not None or + self.AcceptanceDateTime is not None or + self.DestinationPostalCode is not None or + self.Content is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Pounds, input_name='Pounds'), namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if not 
self.Machinable: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.MailType is not None: + namespaceprefix_ = self.MailType_nsprefix_ + ':' if (UseCapturedNS_ and self.MailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailType), input_name='MailType')), namespaceprefix_ , eol_)) + if self.GXG is not None: + namespaceprefix_ = self.GXG_nsprefix_ + ':' if (UseCapturedNS_ and self.GXG_nsprefix_) else '' + self.GXG.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GXG', pretty_print=pretty_print) + if self.ValueOfContents is not None: + namespaceprefix_ = self.ValueOfContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ValueOfContents_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValueOfContents>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValueOfContents), input_name='ValueOfContents')), namespaceprefix_ , eol_)) + if self.Country is not None: + namespaceprefix_ = self.Country_nsprefix_ + ':' if (UseCapturedNS_ and self.Country_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Country), input_name='Country')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Size is not None: + namespaceprefix_ = self.Size_nsprefix_ + ':' if (UseCapturedNS_ and self.Size_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSize>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Size), input_name='Size')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, 
input_name='Girth'), namespaceprefix_ , eol_)) + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginZip), input_name='OriginZip')), namespaceprefix_ , eol_)) + if self.CommercialFlag is not None: + namespaceprefix_ = self.CommercialFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommercialFlag), input_name='CommercialFlag')), namespaceprefix_ , eol_)) + if self.CommercialPlusFlag is not None: + namespaceprefix_ = self.CommercialPlusFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPlusFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPlusFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommercialPlusFlag), input_name='CommercialPlusFlag')), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.AcceptanceDateTime is not None: + namespaceprefix_ = self.AcceptanceDateTime_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptanceDateTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptanceDateTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AcceptanceDateTime), input_name='AcceptanceDateTime')), namespaceprefix_ , eol_)) + if self.DestinationPostalCode is not None: + namespaceprefix_ = self.DestinationPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationPostalCode), input_name='DestinationPostalCode')), namespaceprefix_ , eol_)) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Pounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Pounds') + ival_ = self.gds_validate_integer(ival_, node, 'Pounds') + self.Pounds = ival_ + 
self.Pounds_nsprefix_ = child_.prefix + # validate type PoundsType + self.validate_PoundsType(self.Pounds) + elif nodeName_ == 'Ounces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Ounces') + fval_ = self.gds_validate_decimal(fval_, node, 'Ounces') + self.Ounces = fval_ + self.Ounces_nsprefix_ = child_.prefix + # validate type OuncesType + self.validate_OuncesType(self.Ounces) + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'MailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailType') + value_ = self.gds_validate_string(value_, node, 'MailType') + self.MailType = value_ + self.MailType_nsprefix_ = child_.prefix + elif nodeName_ == 'GXG': + obj_ = GXGType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.GXG = obj_ + obj_.original_tagname_ = 'GXG' + elif nodeName_ == 'ValueOfContents': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ValueOfContents') + value_ = self.gds_validate_string(value_, node, 'ValueOfContents') + self.ValueOfContents = value_ + self.ValueOfContents_nsprefix_ = child_.prefix + elif nodeName_ == 'Country': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Country') + value_ = self.gds_validate_string(value_, node, 'Country') + self.Country = value_ + self.Country_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Size': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Size') + value_ = self.gds_validate_string(value_, node, 'Size') + self.Size = value_ + self.Size_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZip': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginZip') + value_ = self.gds_validate_string(value_, node, 'OriginZip') + self.OriginZip = value_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommercialFlag') + value_ = self.gds_validate_string(value_, node, 'CommercialFlag') + self.CommercialFlag = value_ + self.CommercialFlag_nsprefix_ = 
child_.prefix + elif nodeName_ == 'CommercialPlusFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommercialPlusFlag') + value_ = self.gds_validate_string(value_, node, 'CommercialPlusFlag') + self.CommercialPlusFlag = value_ + self.CommercialPlusFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'AcceptanceDateTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AcceptanceDateTime') + value_ = self.gds_validate_string(value_, node, 'AcceptanceDateTime') + self.AcceptanceDateTime = value_ + self.AcceptanceDateTime_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationPostalCode') + value_ = self.gds_validate_string(value_, node, 'DestinationPostalCode') + self.DestinationPostalCode = value_ + self.DestinationPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + obj_.original_tagname_ = 'Content' +# end class PackageType + + +class GXGType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, POBoxFlag=None, GiftFlag=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.POBoxFlag = POBoxFlag + self.POBoxFlag_nsprefix_ = None + self.GiftFlag = GiftFlag + self.GiftFlag_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, GXGType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if GXGType.subclass: + return GXGType.subclass(*args_, **kwargs_) + else: + return GXGType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_POBoxFlag(self): + return self.POBoxFlag + def set_POBoxFlag(self, POBoxFlag): + self.POBoxFlag = POBoxFlag + def get_GiftFlag(self): + return self.GiftFlag + def set_GiftFlag(self, GiftFlag): + self.GiftFlag = GiftFlag + def has__content(self): + if ( + self.POBoxFlag is not None or + self.GiftFlag is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('GXGType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'GXGType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GXGType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='GXGType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GXGType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.POBoxFlag is not None: + namespaceprefix_ = self.POBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.POBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POBoxFlag), input_name='POBoxFlag')), namespaceprefix_ , eol_)) + if self.GiftFlag is not None: + namespaceprefix_ = self.GiftFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.GiftFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGiftFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GiftFlag), input_name='GiftFlag')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'POBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POBoxFlag') + value_ = self.gds_validate_string(value_, node, 'POBoxFlag') + self.POBoxFlag = value_ + self.POBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'GiftFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GiftFlag') + value_ = self.gds_validate_string(value_, node, 'GiftFlag') + self.GiftFlag = value_ + self.GiftFlag_nsprefix_ = child_.prefix +# end class GXGType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, 
value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % (namespaceprefix_ , self.gds_format_integer(ExtraService_, input_name='ExtraService'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ExtraService') + ival_ = self.gds_validate_integer(ival_, node, 'ExtraService') + self.ExtraService.append(ival_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + 
self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, 
nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentDescription') + value_ = self.gds_validate_string(value_, node, 'ContentDescription') + self.ContentDescription = value_ + self.ContentDescription_nsprefix_ = child_.prefix +# end class ContentType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Request' + rootClass = IntlRateV2Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Request' + rootClass = IntlRateV2Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Request' + rootClass = IntlRateV2Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Request' + rootClass = IntlRateV2Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from intl_rate_v2_request import *\n\n') + sys.stdout.write('import intl_rate_v2_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "ExtraServicesType", + "GXGType", + "IntlRateV2Request", + "PackageType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_response.py b/modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_response.py new file mode 100644 index 0000000000..35675518a2 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/intl_rate_v2_response.py @@ -0,0 +1,2877 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:42 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/intl_rate_v2_response.py') +# +# Command line arguments: +# ./schemas/IntlRateV2Response.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/intl_rate_v2_response.py" ./schemas/IntlRateV2Response.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class IntlRateV2Response(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, IntlRateV2Response) + if subclass is not None: + return subclass(*args_, **kwargs_) + if IntlRateV2Response.subclass: + return IntlRateV2Response.subclass(*args_, **kwargs_) + else: + return IntlRateV2Response(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def has__content(self): + if ( + self.Package + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Response', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntlRateV2Response') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'IntlRateV2Response': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntlRateV2Response') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntlRateV2Response', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='IntlRateV2Response'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Response', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' +# end class IntlRateV2Response + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Prohibitions=None, Restrictions=None, Observations=None, CustomsForms=None, ExpressMail=None, AreasServed=None, AdditionalRestrictions=None, Content=None, Service=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.CustomsForms = CustomsForms + self.CustomsForms_nsprefix_ = None + self.ExpressMail = ExpressMail + self.ExpressMail_nsprefix_ = None + self.AreasServed = AreasServed + self.AreasServed_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + if Service is None: + self.Service = [] + else: + self.Service = Service + self.Service_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Prohibitions(self): + return self.Prohibitions + def set_Prohibitions(self, Prohibitions): + self.Prohibitions = Prohibitions + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def get_Observations(self): + return self.Observations + def set_Observations(self, Observations): + self.Observations = Observations + def get_CustomsForms(self): + return 
self.CustomsForms + def set_CustomsForms(self, CustomsForms): + self.CustomsForms = CustomsForms + def get_ExpressMail(self): + return self.ExpressMail + def set_ExpressMail(self, ExpressMail): + self.ExpressMail = ExpressMail + def get_AreasServed(self): + return self.AreasServed + def set_AreasServed(self, AreasServed): + self.AreasServed = AreasServed + def get_AdditionalRestrictions(self): + return self.AdditionalRestrictions + def set_AdditionalRestrictions(self, AdditionalRestrictions): + self.AdditionalRestrictions = AdditionalRestrictions + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def add_Service(self, value): + self.Service.append(value) + def insert_Service_at(self, index, value): + self.Service.insert(index, value) + def replace_Service_at(self, index, value): + self.Service[index] = value + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Prohibitions is not None or + self.Restrictions is not None or + self.Observations is not None or + self.CustomsForms is not None or + self.ExpressMail is not None or + self.AreasServed is not None or + self.AdditionalRestrictions is not None or + self.Content is not None or + self.Service + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Prohibitions is not None: + namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProhibitions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_)) + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else 
'' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + if self.Observations is not None: + namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sObservations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_)) + if self.CustomsForms is not None: + namespaceprefix_ = self.CustomsForms_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomsForms_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomsForms>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomsForms), input_name='CustomsForms')), namespaceprefix_ , eol_)) + if self.ExpressMail is not None: + namespaceprefix_ = self.ExpressMail_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpressMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExpressMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExpressMail), input_name='ExpressMail')), namespaceprefix_ , eol_)) + if self.AreasServed is not None: + namespaceprefix_ = self.AreasServed_nsprefix_ + ':' if (UseCapturedNS_ and self.AreasServed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAreasServed>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AreasServed), input_name='AreasServed')), namespaceprefix_ , eol_)) + if self.AdditionalRestrictions is not None: + namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_)) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + for Service_ in self.Service: + namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else '' + Service_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Service', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Prohibitions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Prohibitions') + value_ = 
self.gds_validate_string(value_, node, 'Prohibitions') + self.Prohibitions = value_ + self.Prohibitions_nsprefix_ = child_.prefix + elif nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomsForms': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomsForms') + value_ = self.gds_validate_string(value_, node, 'CustomsForms') + self.CustomsForms = value_ + self.CustomsForms_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpressMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExpressMail') + value_ = self.gds_validate_string(value_, node, 'ExpressMail') + self.ExpressMail = value_ + self.ExpressMail_nsprefix_ = child_.prefix + elif nodeName_ == 'AreasServed': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AreasServed') + value_ = self.gds_validate_string(value_, node, 'AreasServed') + self.AreasServed = value_ + self.AreasServed_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + obj_.original_tagname_ = 'Content' + elif nodeName_ == 'Service': + obj_ = ServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Service.append(obj_) + obj_.original_tagname_ = 'Service' +# end class PackageType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def 
has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentDescription') + value_ = self.gds_validate_string(value_, node, 'ContentDescription') + self.ContentDescription = value_ + self.ContentDescription_nsprefix_ = child_.prefix +# end class ContentType + + +class ServiceType(GeneratedsSuper): + 
__hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Pounds=None, Ounces=None, Machinable=None, MailType=None, GXG=None, Container=None, Width=None, Length=None, Height=None, Girth=None, Country=None, Postage=None, CommercialPostage=None, CommercialPlusPostage=None, ExtraServices=None, ValueOfContents=None, InsComment=None, ParcelIndemnityCoverage=None, SvcCommitments=None, SvcDescription=None, MaxDimensions=None, MaxWeight=None, GuaranteeAvailability=None, GXGLocations=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Pounds = Pounds + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.Ounces_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.MailType = MailType + self.MailType_nsprefix_ = None + self.GXG = GXG + self.GXG_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Country = Country + self.Country_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.CommercialPostage = CommercialPostage + self.CommercialPostage_nsprefix_ = None + self.CommercialPlusPostage = CommercialPlusPostage + self.CommercialPlusPostage_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.ValueOfContents = ValueOfContents + self.ValueOfContents_nsprefix_ = None + self.InsComment = InsComment + self.InsComment_nsprefix_ = None + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + self.ParcelIndemnityCoverage_nsprefix_ = None + self.SvcCommitments = SvcCommitments + self.SvcCommitments_nsprefix_ = None + self.SvcDescription = SvcDescription + self.SvcDescription_nsprefix_ = None + self.MaxDimensions = MaxDimensions + self.MaxDimensions_nsprefix_ = None + self.MaxWeight = MaxWeight + self.MaxWeight_nsprefix_ = None + self.GuaranteeAvailability = GuaranteeAvailability + self.GuaranteeAvailability_nsprefix_ = None + self.GXGLocations = GXGLocations + self.GXGLocations_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ServiceType.subclass: + return ServiceType.subclass(*args_, **kwargs_) + else: + return ServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, Ounces): + self.Ounces = Ounces + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_MailType(self): + return self.MailType + def set_MailType(self, MailType): + self.MailType = MailType + def get_GXG(self): + return self.GXG + def set_GXG(self, GXG): + self.GXG = GXG + def get_Container(self): + return self.Container + def set_Container(self, Container): + 
self.Container = Container + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Country(self): + return self.Country + def set_Country(self, Country): + self.Country = Country + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_CommercialPostage(self): + return self.CommercialPostage + def set_CommercialPostage(self, CommercialPostage): + self.CommercialPostage = CommercialPostage + def get_CommercialPlusPostage(self): + return self.CommercialPlusPostage + def set_CommercialPlusPostage(self, CommercialPlusPostage): + self.CommercialPlusPostage = CommercialPlusPostage + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_ValueOfContents(self): + return self.ValueOfContents + def set_ValueOfContents(self, ValueOfContents): + self.ValueOfContents = ValueOfContents + def get_InsComment(self): + return self.InsComment + def set_InsComment(self, InsComment): + self.InsComment = InsComment + def get_ParcelIndemnityCoverage(self): + return self.ParcelIndemnityCoverage + def set_ParcelIndemnityCoverage(self, ParcelIndemnityCoverage): + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + def get_SvcCommitments(self): + return self.SvcCommitments + def set_SvcCommitments(self, SvcCommitments): + self.SvcCommitments = SvcCommitments + def get_SvcDescription(self): + return self.SvcDescription + def set_SvcDescription(self, SvcDescription): + self.SvcDescription = SvcDescription + def get_MaxDimensions(self): + return self.MaxDimensions + def set_MaxDimensions(self, MaxDimensions): + self.MaxDimensions = MaxDimensions + def get_MaxWeight(self): + return self.MaxWeight + def set_MaxWeight(self, MaxWeight): + self.MaxWeight = MaxWeight + def get_GuaranteeAvailability(self): + return self.GuaranteeAvailability + def set_GuaranteeAvailability(self, GuaranteeAvailability): + self.GuaranteeAvailability = GuaranteeAvailability + def get_GXGLocations(self): + return self.GXGLocations + def set_GXGLocations(self, GXGLocations): + self.GXGLocations = GXGLocations + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Pounds is not None or + self.Ounces is not None or + self.Machinable is not None or + self.MailType is not None or + self.GXG is not None or + self.Container is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Country is not None or + self.Postage is not None or + self.CommercialPostage is not None or + self.CommercialPlusPostage is not None or + self.ExtraServices is not None or + self.ValueOfContents is not None or + self.InsComment is not None or + self.ParcelIndemnityCoverage is not None or + self.SvcCommitments is not None or + self.SvcDescription is not None or + self.MaxDimensions is not None or + self.MaxWeight is not None or + self.GuaranteeAvailability is not None or + self.GXGLocations is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceType', 
pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ServiceType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Pounds, input_name='Pounds'), namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Machinable), input_name='Machinable')), namespaceprefix_ , eol_)) + if self.MailType is not None: + namespaceprefix_ = self.MailType_nsprefix_ + ':' if (UseCapturedNS_ and self.MailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailType), input_name='MailType')), namespaceprefix_ , eol_)) + if self.GXG is not None: + namespaceprefix_ = self.GXG_nsprefix_ + ':' if (UseCapturedNS_ and self.GXG_nsprefix_) else '' + self.GXG.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GXG', pretty_print=pretty_print) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Country is not None: + namespaceprefix_ = self.Country_nsprefix_ + ':' if (UseCapturedNS_ and self.Country_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Country), input_name='Country')), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.CommercialPostage is not None: + namespaceprefix_ = self.CommercialPostage_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPostage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialPostage, input_name='CommercialPostage'), namespaceprefix_ , eol_)) + if self.CommercialPlusPostage is not None: + namespaceprefix_ = self.CommercialPlusPostage_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPlusPostage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPlusPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialPlusPostage, input_name='CommercialPlusPostage'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.ValueOfContents is not None: + namespaceprefix_ = self.ValueOfContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ValueOfContents_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValueOfContents>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValueOfContents, input_name='ValueOfContents'), namespaceprefix_ , eol_)) + if self.InsComment is not None: + namespaceprefix_ = self.InsComment_nsprefix_ + ':' if (UseCapturedNS_ and self.InsComment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsComment>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsComment), 
input_name='InsComment')), namespaceprefix_ , eol_)) + if self.ParcelIndemnityCoverage is not None: + namespaceprefix_ = self.ParcelIndemnityCoverage_nsprefix_ + ':' if (UseCapturedNS_ and self.ParcelIndemnityCoverage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sParcelIndemnityCoverage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ParcelIndemnityCoverage, input_name='ParcelIndemnityCoverage'), namespaceprefix_ , eol_)) + if self.SvcCommitments is not None: + namespaceprefix_ = self.SvcCommitments_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcCommitments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcCommitments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcCommitments), input_name='SvcCommitments')), namespaceprefix_ , eol_)) + if self.SvcDescription is not None: + namespaceprefix_ = self.SvcDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcDescription), input_name='SvcDescription')), namespaceprefix_ , eol_)) + if self.MaxDimensions is not None: + namespaceprefix_ = self.MaxDimensions_nsprefix_ + ':' if (UseCapturedNS_ and self.MaxDimensions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMaxDimensions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MaxDimensions), input_name='MaxDimensions')), namespaceprefix_ , eol_)) + if self.MaxWeight is not None: + namespaceprefix_ = self.MaxWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.MaxWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMaxWeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.MaxWeight, input_name='MaxWeight'), namespaceprefix_ , eol_)) + if self.GuaranteeAvailability is not None: + namespaceprefix_ = self.GuaranteeAvailability_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteeAvailability_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteeAvailability>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteeAvailability), input_name='GuaranteeAvailability')), namespaceprefix_ , eol_)) + if self.GXGLocations is not None: + namespaceprefix_ = self.GXGLocations_nsprefix_ + ':' if (UseCapturedNS_ and self.GXGLocations_nsprefix_) else '' + self.GXGLocations.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GXGLocations', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Pounds' and child_.text: + sval_ = child_.text + fval_ = 
self.gds_parse_float(sval_, node, 'Pounds') + fval_ = self.gds_validate_float(fval_, node, 'Pounds') + self.Pounds = fval_ + self.Pounds_nsprefix_ = child_.prefix + elif nodeName_ == 'Ounces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Ounces') + fval_ = self.gds_validate_float(fval_, node, 'Ounces') + self.Ounces = fval_ + self.Ounces_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Machinable') + value_ = self.gds_validate_string(value_, node, 'Machinable') + self.Machinable = value_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'MailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailType') + value_ = self.gds_validate_string(value_, node, 'MailType') + self.MailType = value_ + self.MailType_nsprefix_ = child_.prefix + elif nodeName_ == 'GXG': + obj_ = GXGType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.GXG = obj_ + obj_.original_tagname_ = 'GXG' + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Width') + fval_ = self.gds_validate_float(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Length') + fval_ = self.gds_validate_float(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Height') + fval_ = self.gds_validate_float(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Girth') + fval_ = self.gds_validate_float(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Country': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Country') + value_ = self.gds_validate_string(value_, node, 'Country') + self.Country = value_ + self.Country_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialPostage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialPostage') + fval_ = self.gds_validate_float(fval_, node, 'CommercialPostage') + self.CommercialPostage = fval_ + self.CommercialPostage_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialPlusPostage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialPlusPostage') + fval_ = self.gds_validate_float(fval_, node, 'CommercialPlusPostage') + self.CommercialPlusPostage = fval_ + self.CommercialPlusPostage_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + 
obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'ValueOfContents' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'ValueOfContents') + fval_ = self.gds_validate_float(fval_, node, 'ValueOfContents') + self.ValueOfContents = fval_ + self.ValueOfContents_nsprefix_ = child_.prefix + elif nodeName_ == 'InsComment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsComment') + value_ = self.gds_validate_string(value_, node, 'InsComment') + self.InsComment = value_ + self.InsComment_nsprefix_ = child_.prefix + elif nodeName_ == 'ParcelIndemnityCoverage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'ParcelIndemnityCoverage') + fval_ = self.gds_validate_float(fval_, node, 'ParcelIndemnityCoverage') + self.ParcelIndemnityCoverage = fval_ + self.ParcelIndemnityCoverage_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcCommitments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SvcCommitments') + value_ = self.gds_validate_string(value_, node, 'SvcCommitments') + self.SvcCommitments = value_ + self.SvcCommitments_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SvcDescription') + value_ = self.gds_validate_string(value_, node, 'SvcDescription') + self.SvcDescription = value_ + self.SvcDescription_nsprefix_ = child_.prefix + elif nodeName_ == 'MaxDimensions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MaxDimensions') + value_ = self.gds_validate_string(value_, node, 'MaxDimensions') + self.MaxDimensions = value_ + self.MaxDimensions_nsprefix_ = child_.prefix + elif nodeName_ == 'MaxWeight' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'MaxWeight') + fval_ = self.gds_validate_float(fval_, node, 'MaxWeight') + self.MaxWeight = fval_ + self.MaxWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteeAvailability': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteeAvailability') + value_ = self.gds_validate_string(value_, node, 'GuaranteeAvailability') + self.GuaranteeAvailability = value_ + self.GuaranteeAvailability_nsprefix_ = child_.prefix + elif nodeName_ == 'GXGLocations': + obj_ = GXGLocationsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.GXGLocations = obj_ + obj_.original_tagname_ = 'GXGLocations' +# end class ServiceType + + +class GXGType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, POBoxFlag=None, GiftFlag=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.POBoxFlag = POBoxFlag + self.POBoxFlag_nsprefix_ = None + self.GiftFlag = GiftFlag + self.GiftFlag_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, GXGType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if GXGType.subclass: + return GXGType.subclass(*args_, **kwargs_) + else: + return GXGType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_POBoxFlag(self): + return 
self.POBoxFlag + def set_POBoxFlag(self, POBoxFlag): + self.POBoxFlag = POBoxFlag + def get_GiftFlag(self): + return self.GiftFlag + def set_GiftFlag(self, GiftFlag): + self.GiftFlag = GiftFlag + def has__content(self): + if ( + self.POBoxFlag is not None or + self.GiftFlag is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('GXGType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'GXGType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GXGType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GXGType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GXGType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.POBoxFlag is not None: + namespaceprefix_ = self.POBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.POBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POBoxFlag), input_name='POBoxFlag')), namespaceprefix_ , eol_)) + if self.GiftFlag is not None: + namespaceprefix_ = self.GiftFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.GiftFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGiftFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GiftFlag), input_name='GiftFlag')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'POBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POBoxFlag') + value_ = self.gds_validate_string(value_, node, 'POBoxFlag') + self.POBoxFlag = value_ + self.POBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'GiftFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GiftFlag') + value_ = self.gds_validate_string(value_, node, 'GiftFlag') + self.GiftFlag = value_ + self.GiftFlag_nsprefix_ = child_.prefix +# end class GXGType + + +class 
ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + obj_ = ExtraServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraService.append(obj_) + obj_.original_tagname_ = 'ExtraService' +# end class ExtraServicesType + + +class ExtraServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Available=None, OnlineAvailable=None, Price=None, OnlinePrice=None, DeclaredValueRequired=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Available = Available + self.Available_nsprefix_ = None + self.OnlineAvailable = OnlineAvailable + self.OnlineAvailable_nsprefix_ = None + self.Price = Price + self.Price_nsprefix_ = None + self.OnlinePrice = OnlinePrice + self.OnlinePrice_nsprefix_ = None + self.DeclaredValueRequired = DeclaredValueRequired + self.DeclaredValueRequired_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServiceType.subclass: + return ExtraServiceType.subclass(*args_, **kwargs_) + else: + return ExtraServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Available(self): + return self.Available + def set_Available(self, Available): + self.Available = Available + def get_OnlineAvailable(self): + return self.OnlineAvailable + def set_OnlineAvailable(self, OnlineAvailable): + self.OnlineAvailable = OnlineAvailable + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def get_OnlinePrice(self): + return self.OnlinePrice + def set_OnlinePrice(self, OnlinePrice): + self.OnlinePrice = OnlinePrice + def get_DeclaredValueRequired(self): + return self.DeclaredValueRequired + def set_DeclaredValueRequired(self, DeclaredValueRequired): + self.DeclaredValueRequired = DeclaredValueRequired + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Available is not None or + self.OnlineAvailable is not None or + self.Price is not None or + self.OnlinePrice is not None or + self.DeclaredValueRequired is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ 
+ if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceID), input_name='ServiceID')), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Available is not None: + namespaceprefix_ = self.Available_nsprefix_ + ':' if (UseCapturedNS_ and self.Available_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Available), input_name='Available')), namespaceprefix_ , eol_)) + if self.OnlineAvailable is not None: + namespaceprefix_ = self.OnlineAvailable_nsprefix_ + ':' if (UseCapturedNS_ and self.OnlineAvailable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOnlineAvailable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OnlineAvailable), input_name='OnlineAvailable')), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + if self.OnlinePrice is not None: + namespaceprefix_ = self.OnlinePrice_nsprefix_ + ':' if (UseCapturedNS_ and self.OnlinePrice_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOnlinePrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.OnlinePrice, input_name='OnlinePrice'), namespaceprefix_ , eol_)) + if self.DeclaredValueRequired is not None: + namespaceprefix_ = self.DeclaredValueRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.DeclaredValueRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sDeclaredValueRequired>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeclaredValueRequired), input_name='DeclaredValueRequired')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceID') + value_ = self.gds_validate_string(value_, node, 'ServiceID') + self.ServiceID = value_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Available': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Available') + value_ = self.gds_validate_string(value_, node, 'Available') + self.Available = value_ + self.Available_nsprefix_ = child_.prefix + elif nodeName_ == 'OnlineAvailable': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OnlineAvailable') + value_ = self.gds_validate_string(value_, node, 'OnlineAvailable') + self.OnlineAvailable = value_ + self.OnlineAvailable_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix + elif nodeName_ == 'OnlinePrice' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'OnlinePrice') + fval_ = self.gds_validate_float(fval_, node, 'OnlinePrice') + self.OnlinePrice = fval_ + self.OnlinePrice_nsprefix_ = child_.prefix + elif nodeName_ == 'DeclaredValueRequired': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeclaredValueRequired') + value_ = self.gds_validate_string(value_, node, 'DeclaredValueRequired') + self.DeclaredValueRequired = value_ + self.DeclaredValueRequired_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +class GXGLocationsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PostOffice=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.PostOffice = PostOffice + self.PostOffice_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, GXGLocationsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if GXGLocationsType.subclass: + return GXGLocationsType.subclass(*args_, **kwargs_) + else: + return GXGLocationsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + 
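As a quick illustration of the container/item pattern used by ExtraServicesType and ExtraServiceType above, a sketch only; the import path is assumed from the file added by this patch, and the field values are arbitrary.

import io
from karrio.schemas.usps import intl_rate_v2_response as model

# ExtraServicesType wraps a repeatable ExtraService child; items are appended
# through the generated add_ExtraService() helper shown above.
extra = model.ExtraServiceType(
    ServiceID="1",
    ServiceName="Insurance",
    Available="True",
    Price=12.5,
)
services = model.ExtraServicesType()
services.add_ExtraService(extra)

buffer = io.StringIO()
services.export(buffer, 0, pretty_print=True)
print(buffer.getvalue())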
return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PostOffice(self): + return self.PostOffice + def set_PostOffice(self, PostOffice): + self.PostOffice = PostOffice + def has__content(self): + if ( + self.PostOffice is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGLocationsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('GXGLocationsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'GXGLocationsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GXGLocationsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GXGLocationsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GXGLocationsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGLocationsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PostOffice is not None: + namespaceprefix_ = self.PostOffice_nsprefix_ + ':' if (UseCapturedNS_ and self.PostOffice_nsprefix_) else '' + self.PostOffice.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PostOffice', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PostOffice': + obj_ = PostOfficeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.PostOffice = obj_ + obj_.original_tagname_ = 'PostOffice' +# end class GXGLocationsType + + +class PostOfficeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Name=None, Address=None, City=None, State=None, ZipCode=None, RetailGXGCutOffTime=None, SaturDayCutOffTime=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Name = Name + self.Name_nsprefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZipCode = 
ZipCode + self.ZipCode_nsprefix_ = None + self.RetailGXGCutOffTime = RetailGXGCutOffTime + self.RetailGXGCutOffTime_nsprefix_ = None + self.SaturDayCutOffTime = SaturDayCutOffTime + self.SaturDayCutOffTime_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PostOfficeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PostOfficeType.subclass: + return PostOfficeType.subclass(*args_, **kwargs_) + else: + return PostOfficeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Name(self): + return self.Name + def set_Name(self, Name): + self.Name = Name + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZipCode(self): + return self.ZipCode + def set_ZipCode(self, ZipCode): + self.ZipCode = ZipCode + def get_RetailGXGCutOffTime(self): + return self.RetailGXGCutOffTime + def set_RetailGXGCutOffTime(self, RetailGXGCutOffTime): + self.RetailGXGCutOffTime = RetailGXGCutOffTime + def get_SaturDayCutOffTime(self): + return self.SaturDayCutOffTime + def set_SaturDayCutOffTime(self, SaturDayCutOffTime): + self.SaturDayCutOffTime = SaturDayCutOffTime + def has__content(self): + if ( + self.Name is not None or + self.Address is not None or + self.City is not None or + self.State is not None or + self.ZipCode is not None or + self.RetailGXGCutOffTime is not None or + self.SaturDayCutOffTime is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostOfficeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PostOfficeType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PostOfficeType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PostOfficeType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PostOfficeType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PostOfficeType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostOfficeType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Name is not None: + namespaceprefix_ = self.Name_nsprefix_ + ':' if (UseCapturedNS_ and self.Name_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sName>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')), namespaceprefix_ , eol_)) + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address), input_name='Address')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZipCode is not None: + namespaceprefix_ = self.ZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipCode), input_name='ZipCode')), namespaceprefix_ , eol_)) + if self.RetailGXGCutOffTime is not None: + namespaceprefix_ = self.RetailGXGCutOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailGXGCutOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailGXGCutOffTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailGXGCutOffTime), input_name='RetailGXGCutOffTime')), namespaceprefix_ , eol_)) + if self.SaturDayCutOffTime is not None: + namespaceprefix_ = self.SaturDayCutOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.SaturDayCutOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSaturDayCutOffTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SaturDayCutOffTime), input_name='SaturDayCutOffTime')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Name': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Name') + value_ = self.gds_validate_string(value_, node, 'Name') + self.Name = value_ + self.Name_nsprefix_ = child_.prefix + elif nodeName_ == 'Address': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address') + value_ = self.gds_validate_string(value_, node, 'Address') + self.Address = value_ + self.Address_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = 
value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZipCode') + value_ = self.gds_validate_string(value_, node, 'ZipCode') + self.ZipCode = value_ + self.ZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailGXGCutOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailGXGCutOffTime') + value_ = self.gds_validate_string(value_, node, 'RetailGXGCutOffTime') + self.RetailGXGCutOffTime = value_ + self.RetailGXGCutOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'SaturDayCutOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SaturDayCutOffTime') + value_ = self.gds_validate_string(value_, node, 'SaturDayCutOffTime') + self.SaturDayCutOffTime = value_ + self.SaturDayCutOffTime_nsprefix_ = child_.prefix +# end class PostOfficeType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if 
reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from intl_rate_v2_response import *\n\n') + sys.stdout.write('import intl_rate_v2_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
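For reference, a minimal sketch of parsing a response string with the module-level helpers above. The import path is assumed from the file added by this patch, and the Package layout is only illustrative (based on the USPS IntlRateV2 documentation), so treat the element and accessor names as assumptions.

from karrio.schemas.usps import intl_rate_v2_response as model

SAMPLE = """<IntlRateV2Response>
    <Package ID="0">
        <Prohibitions>No known prohibitions.</Prohibitions>
    </Package>
</IntlRateV2Response>"""

# silence=True builds the object tree without echoing it back to stdout;
# parse warnings, if any, are still reported on stderr.
response = model.parseString(SAMPLE, silence=True)
print(type(response).__name__)  # -> IntlRateV2Response

# Child access follows the generated get_<Element>() convention, e.g. the parsed
# Package objects would be reachable via response.get_Package() (name assumed).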
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "ExtraServiceType", + "ExtraServicesType", + "GXGLocationsType", + "GXGType", + "IntlRateV2Response", + "PackageType", + "PostOfficeType", + "ServiceType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/mrsv4_0_request.py b/modules/connectors/usps/karrio/schemas/usps/mrsv4_0_request.py new file mode 100644 index 0000000000..cb400e723c --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/mrsv4_0_request.py @@ -0,0 +1,1828 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:42 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/mrsv4_0_request.py') +# +# Command line arguments: +# ./schemas/MRSV4.0Request.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/mrsv4_0_request.py" ./schemas/MRSV4.0Request.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class MRSV4_0Request(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, CustomerName=None, CustomerAddress1=None, CustomerAddress2=None, CustomerCity=None, CustomerState=None, CustomerZip5=None, CustomerZip4=None, RetailerName=None, RetailerAddress=None, PermitNumber=None, PermitIssuingPOCity=None, PermitIssuingPOState=None, PermitIssuingPOZip5=None, PDUFirmName=None, PDUPOBox=None, PDUCity=None, PDUState=None, PDUZip5=None, PDUZip4=None, ServiceType=None, DeliveryConfirmation=None, InsuranceValue=None, MailingAckPackageID=None, WeightInPounds=None, WeightInOunces=None, RMA=None, RMAPICFlag=None, ImageType=None, RMABarcode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.CustomerName = CustomerName + self.CustomerName_nsprefix_ = None + self.CustomerAddress1 = CustomerAddress1 + self.CustomerAddress1_nsprefix_ = None + self.CustomerAddress2 = CustomerAddress2 + self.CustomerAddress2_nsprefix_ = None + self.CustomerCity = CustomerCity + self.CustomerCity_nsprefix_ = None + self.CustomerState = CustomerState + self.CustomerState_nsprefix_ = None + self.CustomerZip5 = CustomerZip5 + self.CustomerZip5_nsprefix_ = None + self.CustomerZip4 = CustomerZip4 + self.CustomerZip4_nsprefix_ = None + self.RetailerName = RetailerName + self.RetailerName_nsprefix_ = None + self.RetailerAddress = RetailerAddress + self.RetailerAddress_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.PermitIssuingPOCity = PermitIssuingPOCity + self.PermitIssuingPOCity_nsprefix_ = None + self.PermitIssuingPOState = PermitIssuingPOState + self.PermitIssuingPOState_nsprefix_ = None + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + self.PermitIssuingPOZip5_nsprefix_ = None + self.PDUFirmName = PDUFirmName + self.PDUFirmName_nsprefix_ = None + self.PDUPOBox = PDUPOBox + self.PDUPOBox_nsprefix_ = None + self.PDUCity = PDUCity + self.PDUCity_nsprefix_ = None + self.PDUState = PDUState + self.PDUState_nsprefix_ = None + self.PDUZip5 = PDUZip5 + self.PDUZip5_nsprefix_ = None + self.PDUZip4 = PDUZip4 + self.PDUZip4_nsprefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + 
self.DeliveryConfirmation = DeliveryConfirmation + self.DeliveryConfirmation_nsprefix_ = None + self.InsuranceValue = InsuranceValue + self.InsuranceValue_nsprefix_ = None + self.MailingAckPackageID = MailingAckPackageID + self.MailingAckPackageID_nsprefix_ = None + self.WeightInPounds = WeightInPounds + self.WeightInPounds_nsprefix_ = None + self.WeightInOunces = WeightInOunces + self.WeightInOunces_nsprefix_ = None + self.RMA = RMA + self.RMA_nsprefix_ = None + self.RMAPICFlag = RMAPICFlag + self.RMAPICFlag_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.RMABarcode = RMABarcode + self.RMABarcode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, MRSV4_0Request) + if subclass is not None: + return subclass(*args_, **kwargs_) + if MRSV4_0Request.subclass: + return MRSV4_0Request.subclass(*args_, **kwargs_) + else: + return MRSV4_0Request(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_CustomerName(self): + return self.CustomerName + def set_CustomerName(self, CustomerName): + self.CustomerName = CustomerName + def get_CustomerAddress1(self): + return self.CustomerAddress1 + def set_CustomerAddress1(self, CustomerAddress1): + self.CustomerAddress1 = CustomerAddress1 + def get_CustomerAddress2(self): + return self.CustomerAddress2 + def set_CustomerAddress2(self, CustomerAddress2): + self.CustomerAddress2 = CustomerAddress2 + def get_CustomerCity(self): + return self.CustomerCity + def set_CustomerCity(self, CustomerCity): + self.CustomerCity = CustomerCity + def get_CustomerState(self): + return self.CustomerState + def set_CustomerState(self, CustomerState): + self.CustomerState = CustomerState + def get_CustomerZip5(self): + return self.CustomerZip5 + def set_CustomerZip5(self, CustomerZip5): + self.CustomerZip5 = CustomerZip5 + def get_CustomerZip4(self): + return self.CustomerZip4 + def set_CustomerZip4(self, CustomerZip4): + self.CustomerZip4 = CustomerZip4 + def get_RetailerName(self): + return self.RetailerName + def set_RetailerName(self, RetailerName): + self.RetailerName = RetailerName + def get_RetailerAddress(self): + return self.RetailerAddress + def set_RetailerAddress(self, RetailerAddress): + self.RetailerAddress = RetailerAddress + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_PermitIssuingPOCity(self): + return self.PermitIssuingPOCity + def set_PermitIssuingPOCity(self, PermitIssuingPOCity): + self.PermitIssuingPOCity = PermitIssuingPOCity + def get_PermitIssuingPOState(self): + return self.PermitIssuingPOState + def set_PermitIssuingPOState(self, PermitIssuingPOState): + self.PermitIssuingPOState = PermitIssuingPOState + def get_PermitIssuingPOZip5(self): + return self.PermitIssuingPOZip5 + def set_PermitIssuingPOZip5(self, PermitIssuingPOZip5): + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + def get_PDUFirmName(self): + return self.PDUFirmName + def set_PDUFirmName(self, PDUFirmName): + self.PDUFirmName = PDUFirmName + def get_PDUPOBox(self): + return self.PDUPOBox + def set_PDUPOBox(self, PDUPOBox): + self.PDUPOBox = PDUPOBox + def get_PDUCity(self): + return self.PDUCity + def set_PDUCity(self, 
PDUCity): + self.PDUCity = PDUCity + def get_PDUState(self): + return self.PDUState + def set_PDUState(self, PDUState): + self.PDUState = PDUState + def get_PDUZip5(self): + return self.PDUZip5 + def set_PDUZip5(self, PDUZip5): + self.PDUZip5 = PDUZip5 + def get_PDUZip4(self): + return self.PDUZip4 + def set_PDUZip4(self, PDUZip4): + self.PDUZip4 = PDUZip4 + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_DeliveryConfirmation(self): + return self.DeliveryConfirmation + def set_DeliveryConfirmation(self, DeliveryConfirmation): + self.DeliveryConfirmation = DeliveryConfirmation + def get_InsuranceValue(self): + return self.InsuranceValue + def set_InsuranceValue(self, InsuranceValue): + self.InsuranceValue = InsuranceValue + def get_MailingAckPackageID(self): + return self.MailingAckPackageID + def set_MailingAckPackageID(self, MailingAckPackageID): + self.MailingAckPackageID = MailingAckPackageID + def get_WeightInPounds(self): + return self.WeightInPounds + def set_WeightInPounds(self, WeightInPounds): + self.WeightInPounds = WeightInPounds + def get_WeightInOunces(self): + return self.WeightInOunces + def set_WeightInOunces(self, WeightInOunces): + self.WeightInOunces = WeightInOunces + def get_RMA(self): + return self.RMA + def set_RMA(self, RMA): + self.RMA = RMA + def get_RMAPICFlag(self): + return self.RMAPICFlag + def set_RMAPICFlag(self, RMAPICFlag): + self.RMAPICFlag = RMAPICFlag + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_RMABarcode(self): + return self.RMABarcode + def set_RMABarcode(self, RMABarcode): + self.RMABarcode = RMABarcode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.CustomerName is not None or + self.CustomerAddress1 is not None or + self.CustomerAddress2 is not None or + self.CustomerCity is not None or + self.CustomerState is not None or + self.CustomerZip5 is not None or + self.CustomerZip4 is not None or + self.RetailerName is not None or + self.RetailerAddress is not None or + self.PermitNumber is not None or + self.PermitIssuingPOCity is not None or + self.PermitIssuingPOState is not None or + self.PermitIssuingPOZip5 is not None or + self.PDUFirmName is not None or + self.PDUPOBox is not None or + self.PDUCity is not None or + self.PDUState is not None or + self.PDUZip5 is not None or + self.PDUZip4 is not None or + self.ServiceType is not None or + self.DeliveryConfirmation is not None or + self.InsuranceValue is not None or + self.MailingAckPackageID is not None or + self.WeightInPounds is not None or + self.WeightInOunces is not None or + self.RMA is not None or + self.RMAPICFlag is not None or + self.ImageType is not None or + self.RMABarcode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MRSV4.0Request', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('MRSV4.0Request') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'MRSV4.0Request': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + 
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MRSV4.0Request')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MRSV4.0Request', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MRSV4.0Request'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MRSV4.0Request', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Option is not None:
+            namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sOption>%s</%sOption>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_))
+        if self.CustomerName is not None:
+            namespaceprefix_ = self.CustomerName_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCustomerName>%s</%sCustomerName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerName), input_name='CustomerName')), namespaceprefix_ , eol_))
+        if self.CustomerAddress1 is not None:
+            namespaceprefix_ = self.CustomerAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerAddress1_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCustomerAddress1>%s</%sCustomerAddress1>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerAddress1), input_name='CustomerAddress1')), namespaceprefix_ , eol_))
+        if self.CustomerAddress2 is not None:
+            namespaceprefix_ = self.CustomerAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerAddress2_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCustomerAddress2>%s</%sCustomerAddress2>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerAddress2), input_name='CustomerAddress2')), namespaceprefix_ , eol_))
+        if self.CustomerCity is not None:
+            namespaceprefix_ = self.CustomerCity_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerCity_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCustomerCity>%s</%sCustomerCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerCity), input_name='CustomerCity')), namespaceprefix_ , eol_))
+        if self.CustomerState is not None:
+            namespaceprefix_ = self.CustomerState_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerState_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCustomerState>%s</%sCustomerState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerState), input_name='CustomerState')), namespaceprefix_ , eol_))
+        if self.CustomerZip5 is not None:
+            namespaceprefix_ = self.CustomerZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerZip5_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCustomerZip5>%s</%sCustomerZip5>%s' % (namespaceprefix_ , self.gds_format_integer(self.CustomerZip5, input_name='CustomerZip5'), namespaceprefix_ , eol_))
+        if self.CustomerZip4 is not None:
+            namespaceprefix_ = self.CustomerZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerZip4_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCustomerZip4>%s</%sCustomerZip4>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerZip4), input_name='CustomerZip4')), namespaceprefix_ , eol_))
+        if self.RetailerName is not None:
+            namespaceprefix_ = self.RetailerName_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRetailerName>%s</%sRetailerName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerName), input_name='RetailerName')), namespaceprefix_ , eol_))
+        if self.RetailerAddress is not None:
+            namespaceprefix_ = self.RetailerAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerAddress_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRetailerAddress>%s</%sRetailerAddress>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerAddress), input_name='RetailerAddress')), namespaceprefix_ , eol_))
+        if self.PermitNumber is not None:
+            namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPermitNumber>%s</%sPermitNumber>%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitNumber, input_name='PermitNumber'), namespaceprefix_ , eol_))
+        if self.PermitIssuingPOCity is not None:
+            namespaceprefix_ = self.PermitIssuingPOCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOCity_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPermitIssuingPOCity>%s</%sPermitIssuingPOCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOCity), input_name='PermitIssuingPOCity')), namespaceprefix_ , eol_))
+        if self.PermitIssuingPOState is not None:
+            namespaceprefix_ = self.PermitIssuingPOState_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOState_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPermitIssuingPOState>%s</%sPermitIssuingPOState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOState), input_name='PermitIssuingPOState')), namespaceprefix_ , eol_))
+        if self.PermitIssuingPOZip5 is not None:
+            namespaceprefix_ = self.PermitIssuingPOZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOZip5_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPermitIssuingPOZip5>%s</%sPermitIssuingPOZip5>%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitIssuingPOZip5, input_name='PermitIssuingPOZip5'), namespaceprefix_ , eol_))
+        if self.PDUFirmName is not None:
+            namespaceprefix_ = self.PDUFirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUFirmName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPDUFirmName>%s</%sPDUFirmName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUFirmName), input_name='PDUFirmName')), namespaceprefix_ , eol_))
+        if self.PDUPOBox is not None:
+            namespaceprefix_ = self.PDUPOBox_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUPOBox_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPDUPOBox>%s</%sPDUPOBox>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUPOBox), input_name='PDUPOBox')), namespaceprefix_ , eol_))
+        if self.PDUCity is not None:
+            namespaceprefix_ = self.PDUCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUCity_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPDUCity>%s</%sPDUCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUCity), input_name='PDUCity')), namespaceprefix_ , eol_))
+        if self.PDUState is not None:
+            namespaceprefix_ = self.PDUState_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUState_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPDUState>%s</%sPDUState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUState), input_name='PDUState')), namespaceprefix_ , eol_))
+        if self.PDUZip5 is not None:
+            namespaceprefix_ = self.PDUZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip5_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPDUZip5>%s</%sPDUZip5>%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip5, input_name='PDUZip5'), namespaceprefix_ , eol_))
+        if self.PDUZip4 is not None:
+            namespaceprefix_ = self.PDUZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip4_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPDUZip4>%s</%sPDUZip4>%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip4, input_name='PDUZip4'), namespaceprefix_ , eol_))
+        if self.ServiceType is not None:
+            namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sServiceType>%s</%sServiceType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_))
+        if self.DeliveryConfirmation is not None:
+            namespaceprefix_ = self.DeliveryConfirmation_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryConfirmation_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDeliveryConfirmation>%s</%sDeliveryConfirmation>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryConfirmation), input_name='DeliveryConfirmation')), namespaceprefix_ , eol_))
+        if self.InsuranceValue is not None:
+            namespaceprefix_ = self.InsuranceValue_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceValue_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sInsuranceValue>%s</%sInsuranceValue>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuranceValue), input_name='InsuranceValue')), namespaceprefix_ , eol_))
+        if self.MailingAckPackageID is not None:
+            namespaceprefix_ = self.MailingAckPackageID_nsprefix_ + ':' if (UseCapturedNS_ and self.MailingAckPackageID_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMailingAckPackageID>%s</%sMailingAckPackageID>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailingAckPackageID), input_name='MailingAckPackageID')), namespaceprefix_ , eol_))
+        if self.WeightInPounds is not None:
+            namespaceprefix_ = self.WeightInPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInPounds_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sWeightInPounds>%s</%sWeightInPounds>%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInPounds, input_name='WeightInPounds'), namespaceprefix_ , eol_))
+        if self.WeightInOunces is not None:
+            namespaceprefix_ = self.WeightInOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInOunces_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sWeightInOunces>%s</%sWeightInOunces>%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInOunces, input_name='WeightInOunces'), namespaceprefix_ , eol_))
+        if self.RMA is not None:
+            namespaceprefix_ = self.RMA_nsprefix_ + ':' if (UseCapturedNS_ and self.RMA_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRMA>%s</%sRMA>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMA), input_name='RMA')), namespaceprefix_ , eol_))
+        if self.RMAPICFlag is not None:
+            namespaceprefix_ = self.RMAPICFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.RMAPICFlag_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRMAPICFlag>%s</%sRMAPICFlag>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMAPICFlag), input_name='RMAPICFlag')), namespaceprefix_ , eol_))
+        if self.ImageType is not None:
+            namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sImageType>%s</%sImageType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_))
+        if self.RMABarcode is not None:
+            namespaceprefix_ = self.RMABarcode_nsprefix_ + ':' if (UseCapturedNS_ and self.RMABarcode_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRMABarcode>%s</%sRMABarcode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMABarcode), input_name='RMABarcode')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        value = find_attr_value_('USERID', node)
+        if value is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            self.USERID = value
+        value = find_attr_value_('PASSWORD', node)
+        if value is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            self.PASSWORD = value
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'Option':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Option')
+            value_ = self.gds_validate_string(value_, node, 'Option')
+            self.Option = value_
+            self.Option_nsprefix_ = child_.prefix
+        elif nodeName_ == 'CustomerName':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'CustomerName')
+            value_ = self.gds_validate_string(value_, node, 'CustomerName')
+            self.CustomerName = value_
+            self.CustomerName_nsprefix_ = child_.prefix
+        elif nodeName_ == 'CustomerAddress1':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_,
node, 'CustomerAddress1') + value_ = self.gds_validate_string(value_, node, 'CustomerAddress1') + self.CustomerAddress1 = value_ + self.CustomerAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerAddress2') + value_ = self.gds_validate_string(value_, node, 'CustomerAddress2') + self.CustomerAddress2 = value_ + self.CustomerAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerCity') + value_ = self.gds_validate_string(value_, node, 'CustomerCity') + self.CustomerCity = value_ + self.CustomerCity_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerState') + value_ = self.gds_validate_string(value_, node, 'CustomerState') + self.CustomerState = value_ + self.CustomerState_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'CustomerZip5') + ival_ = self.gds_validate_integer(ival_, node, 'CustomerZip5') + self.CustomerZip5 = ival_ + self.CustomerZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerZip4') + value_ = self.gds_validate_string(value_, node, 'CustomerZip4') + self.CustomerZip4 = value_ + self.CustomerZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailerName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerName') + value_ = self.gds_validate_string(value_, node, 'RetailerName') + self.RetailerName = value_ + self.RetailerName_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailerAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerAddress') + value_ = self.gds_validate_string(value_, node, 'RetailerAddress') + self.RetailerAddress = value_ + self.RetailerAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitNumber') + ival_ = self.gds_validate_integer(ival_, node, 'PermitNumber') + self.PermitNumber = ival_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOCity') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOCity') + self.PermitIssuingPOCity = value_ + self.PermitIssuingPOCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOState') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOState') + self.PermitIssuingPOState = value_ + self.PermitIssuingPOState_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitIssuingPOZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PermitIssuingPOZip5') + self.PermitIssuingPOZip5 = ival_ + self.PermitIssuingPOZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUFirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUFirmName') + value_ = self.gds_validate_string(value_, node, 'PDUFirmName') + self.PDUFirmName = value_ + self.PDUFirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUPOBox': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUPOBox') + value_ = self.gds_validate_string(value_, node, 'PDUPOBox') + self.PDUPOBox = value_ + self.PDUPOBox_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUCity') + value_ = self.gds_validate_string(value_, node, 'PDUCity') + self.PDUCity = value_ + self.PDUCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUState') + value_ = self.gds_validate_string(value_, node, 'PDUState') + self.PDUState = value_ + self.PDUState_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip5') + self.PDUZip5 = ival_ + self.PDUZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip4') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip4') + self.PDUZip4 = ival_ + self.PDUZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryConfirmation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryConfirmation') + value_ = self.gds_validate_string(value_, node, 'DeliveryConfirmation') + self.DeliveryConfirmation = value_ + self.DeliveryConfirmation_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuranceValue') + value_ = self.gds_validate_string(value_, node, 'InsuranceValue') + self.InsuranceValue = value_ + self.InsuranceValue_nsprefix_ = child_.prefix + elif nodeName_ == 'MailingAckPackageID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailingAckPackageID') + value_ = self.gds_validate_string(value_, node, 'MailingAckPackageID') + self.MailingAckPackageID = value_ + self.MailingAckPackageID_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInPounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInPounds') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInPounds') + self.WeightInPounds = ival_ + self.WeightInPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInOunces' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInOunces') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInOunces') + self.WeightInOunces = ival_ + self.WeightInOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'RMA': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMA') + value_ = self.gds_validate_string(value_, node, 'RMA') + self.RMA = value_ + self.RMA_nsprefix_ = child_.prefix + elif nodeName_ == 'RMAPICFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMAPICFlag') + value_ = self.gds_validate_string(value_, node, 'RMAPICFlag') + self.RMAPICFlag = value_ + self.RMAPICFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + 
self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'RMABarcode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMABarcode') + value_ = self.gds_validate_string(value_, node, 'RMABarcode') + self.RMABarcode = value_ + self.RMABarcode_nsprefix_ = child_.prefix +# end class MRSV4_0Request + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from mrsv4_0_request import *\n\n') + sys.stdout.write('import mrsv4_0_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "MRSV4_0Request" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/priority_mail_request.py b/modules/connectors/usps/karrio/schemas/usps/priority_mail_request.py new file mode 100644 index 0000000000..fc5b3c9999 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/priority_mail_request.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:44 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/priority_mail_request.py') +# +# Command line arguments: +# ./schemas/PriorityMailRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/priority_mail_request.py" ./schemas/PriorityMailRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PriorityMailRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, DestinationType=None, PMGuarantee=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.DestinationType = DestinationType + self.DestinationType_nsprefix_ = None + self.PMGuarantee = PMGuarantee + self.PMGuarantee_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PriorityMailRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PriorityMailRequest.subclass: + return PriorityMailRequest.subclass(*args_, **kwargs_) + else: + return PriorityMailRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_DestinationType(self): + return self.DestinationType + def set_DestinationType(self, DestinationType): + self.DestinationType = DestinationType + def get_PMGuarantee(self): + return self.PMGuarantee + def set_PMGuarantee(self, PMGuarantee): + self.PMGuarantee = PMGuarantee + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.DestinationType is not None or + self.PMGuarantee is not None or + self.ClientType is not None + ): + return True + else: + return False 
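# A minimal usage sketch for these generated PriorityMailRequest bindings,
# assuming the module is importable as karrio.schemas.usps.priority_mail_request
# (mirroring the generateDS output path noted in the file header). The
# USERID/PASSWORD and ZIP values are placeholders; USERID and PASSWORD are
# serialized as XML attributes, the remaining fields as child elements
# (the ZIP fields are integer-typed in this schema).
#
#   import sys
#   from karrio.schemas.usps.priority_mail_request import PriorityMailRequest, parseString
#
#   req = PriorityMailRequest(
#       USERID="XXXXXXXXXXXX",
#       PASSWORD="XXXXXXXXXXXX",
#       OriginZip=20770,
#       DestinationZip=90210,
#   )
#   req.export(sys.stdout, 0, name_="PriorityMailRequest", pretty_print=True)
#
#   # Round trip: parseString() (defined at module level further below) rebuilds
#   # the object tree from an XML fragment; pass silence=True to skip re-export.
#   obj = parseString('<PriorityMailRequest USERID="X" PASSWORD="X"/>', silence=True)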
+ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PriorityMailRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PriorityMailRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PriorityMailRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PriorityMailRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PriorityMailRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PriorityMailRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PriorityMailRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.DestinationType is not None: + namespaceprefix_ = self.DestinationType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationType, input_name='DestinationType'), namespaceprefix_ , eol_)) + if self.PMGuarantee is not None: + namespaceprefix_ = self.PMGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.PMGuarantee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPMGuarantee>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PMGuarantee), input_name='PMGuarantee')), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationType') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationType') + self.DestinationType = ival_ + self.DestinationType_nsprefix_ = child_.prefix + elif nodeName_ == 'PMGuarantee': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PMGuarantee') + value_ = self.gds_validate_string(value_, node, 'PMGuarantee') + self.PMGuarantee = value_ + self.PMGuarantee_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class PriorityMailRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from priority_mail_request import *\n\n') + sys.stdout.write('import priority_mail_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PriorityMailRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/priority_mail_response.py b/modules/connectors/usps/karrio/schemas/usps/priority_mail_response.py new file mode 100644 index 0000000000..c7278c2b57 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/priority_mail_response.py @@ -0,0 +1,1396 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:44 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/priority_mail_response.py') +# +# Command line arguments: +# ./schemas/PriorityMailResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/priority_mail_response.py" ./schemas/PriorityMailResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
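#
# A minimal, illustrative sketch (not part of the generated patch) of how the
# CurrentSubclassModule_ hook and getSubclassFromModule_ above are meant to be
# used: a separate module (the module name below is hypothetical) defines
# "<ClassName>Sub" subclasses, and factory()/parse() then instantiate those
# subclasses instead of the generated ones.
#
#   # File: priority_mail_response_subs.py  (hypothetical)
#   import sys
#   import priority_mail_response as supermod
#
#   class PriorityMailResponseSub(supermod.PriorityMailResponse):
#       def get_Days(self):
#           # e.g. treat a missing transit-day count as zero
#           return super().get_Days() or 0
#
#   # Redirect the generated module to this subclass module before parsing:
#   supermod.CurrentSubclassModule_ = sys.modules[__name__]
#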
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write('    ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PriorityMailResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZip=None, DestinationZip=None, Days=None, Message=None, EffectiveAcceptanceDate=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Days = Days + self.Days_nsprefix_ = None + self.Message = Message + self.Message_nsprefix_ = None + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PriorityMailResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PriorityMailResponse.subclass: + return PriorityMailResponse.subclass(*args_, **kwargs_) + else: + return PriorityMailResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Days(self): + return self.Days + def set_Days(self, Days): + self.Days = Days + def get_Message(self): + return self.Message + def set_Message(self, Message): + self.Message = Message + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Days is not None or + self.Message is not None or + self.EffectiveAcceptanceDate is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='PriorityMailResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PriorityMailResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PriorityMailResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PriorityMailResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PriorityMailResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PriorityMailResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PriorityMailResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s</%sOriginZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s</%sDestinationZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.Days is not None: + namespaceprefix_ = self.Days_nsprefix_ + ':' if (UseCapturedNS_ and self.Days_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDays>%s</%sDays>%s' % (namespaceprefix_ , self.gds_format_integer(self.Days, input_name='Days'), namespaceprefix_ , eol_)) + if self.Message is not None: + namespaceprefix_ = self.Message_nsprefix_ + ':' if (UseCapturedNS_ and self.Message_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMessage>%s</%sMessage>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Message), input_name='Message')), namespaceprefix_ , eol_)) + if self.EffectiveAcceptanceDate is not None: + namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEffectiveAcceptanceDate>%s</%sEffectiveAcceptanceDate>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EffectiveAcceptanceDate), input_name='EffectiveAcceptanceDate')), namespaceprefix_ , eol_)) + if self.ScheduledDeliveryDate is not None: + namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sScheduledDeliveryDate>%s</%sScheduledDeliveryDate>%s' % (namespaceprefix_ ,
self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Days' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Days') + ival_ = self.gds_validate_integer(ival_, node, 'Days') + self.Days = ival_ + self.Days_nsprefix_ = child_.prefix + elif nodeName_ == 'Message': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Message') + value_ = self.gds_validate_string(value_, node, 'Message') + self.Message = value_ + self.Message_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EffectiveAcceptanceDate') + value_ = self.gds_validate_string(value_, node, 'EffectiveAcceptanceDate') + self.EffectiveAcceptanceDate = value_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ScheduledDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate') + self.ScheduledDeliveryDate = value_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix +# end class PriorityMailResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from priority_mail_response import *\n\n') + sys.stdout.write('import priority_mail_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PriorityMailResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/pts_email_request.py b/modules/connectors/usps/karrio/schemas/usps/pts_email_request.py new file mode 100644 index 0000000000..a91326da87 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/pts_email_request.py @@ -0,0 +1,1497 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:43 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/pts_email_request.py') +# +# Command line arguments: +# ./schemas/PTSEmailRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/pts_email_request.py" ./schemas/PTSEmailRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSEmailRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, TrackId=None, ClientIp=None, MpSuffix=None, MpDate=None, RequestType=None, FirstName=None, LastName=None, Email1=None, Email2=None, Email3=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.TrackId = TrackId + self.TrackId_nsprefix_ = None + self.ClientIp = ClientIp + self.ClientIp_nsprefix_ = None + self.MpSuffix = MpSuffix + self.MpSuffix_nsprefix_ = None + self.MpDate = MpDate + self.MpDate_nsprefix_ = None + if RequestType is None: + self.RequestType = [] + else: + self.RequestType = RequestType + self.RequestType_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.Email1 = Email1 + self.Email1_nsprefix_ = None + self.Email2 = Email2 + self.Email2_nsprefix_ = None + self.Email3 = Email3 + self.Email3_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSEmailRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSEmailRequest.subclass: + return PTSEmailRequest.subclass(*args_, **kwargs_) + else: + return PTSEmailRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackId(self): + return self.TrackId + def set_TrackId(self, TrackId): + self.TrackId = TrackId + def get_ClientIp(self): + return self.ClientIp + def set_ClientIp(self, ClientIp): + self.ClientIp = ClientIp + def get_MpSuffix(self): + return self.MpSuffix + def set_MpSuffix(self, MpSuffix): + self.MpSuffix = MpSuffix + def get_MpDate(self): + return self.MpDate + def set_MpDate(self, MpDate): + self.MpDate = MpDate + def get_RequestType(self): + return self.RequestType + def set_RequestType(self, RequestType): + self.RequestType = RequestType + def add_RequestType(self, value): + self.RequestType.append(value) + def insert_RequestType_at(self, index, value): + self.RequestType.insert(index, value) + def replace_RequestType_at(self, index, value): + self.RequestType[index] = value + def 
get_FirstName(self):
+        return self.FirstName
+    def set_FirstName(self, FirstName):
+        self.FirstName = FirstName
+    def get_LastName(self):
+        return self.LastName
+    def set_LastName(self, LastName):
+        self.LastName = LastName
+    def get_Email1(self):
+        return self.Email1
+    def set_Email1(self, Email1):
+        self.Email1 = Email1
+    def get_Email2(self):
+        return self.Email2
+    def set_Email2(self, Email2):
+        self.Email2 = Email2
+    def get_Email3(self):
+        return self.Email3
+    def set_Email3(self, Email3):
+        self.Email3 = Email3
+    def get_USERID(self):
+        return self.USERID
+    def set_USERID(self, USERID):
+        self.USERID = USERID
+    def get_PASSWORD(self):
+        return self.PASSWORD
+    def set_PASSWORD(self, PASSWORD):
+        self.PASSWORD = PASSWORD
+    def has__content(self):
+        if (
+            self.TrackId is not None or
+            self.ClientIp is not None or
+            self.MpSuffix is not None or
+            self.MpDate is not None or
+            self.RequestType or
+            self.FirstName is not None or
+            self.LastName is not None or
+            self.Email1 is not None or
+            self.Email2 is not None or
+            self.Email3 is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEmailRequest', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSEmailRequest')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'PTSEmailRequest':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSEmailRequest')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSEmailRequest', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSEmailRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEmailRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.TrackId is not None:
+            namespaceprefix_ = self.TrackId_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackId_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sTrackId>%s</%sTrackId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackId), input_name='TrackId')), namespaceprefix_ , eol_))
+        if self.ClientIp is not None:
+            namespaceprefix_ = self.ClientIp_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientIp_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sClientIp>%s</%sClientIp>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClientIp), input_name='ClientIp')), namespaceprefix_ , eol_))
+        if self.MpSuffix is not None:
+            namespaceprefix_ = self.MpSuffix_nsprefix_ + ':' if (UseCapturedNS_ and self.MpSuffix_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMpSuffix>%s</%sMpSuffix>%s' % (namespaceprefix_ , self.gds_format_integer(self.MpSuffix, input_name='MpSuffix'), namespaceprefix_ , eol_))
+        if self.MpDate is not None:
+            namespaceprefix_ = self.MpDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MpDate_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMpDate>%s</%sMpDate>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MpDate), input_name='MpDate')), namespaceprefix_ , eol_))
+        for RequestType_ in self.RequestType:
+            namespaceprefix_ = self.RequestType_nsprefix_ + ':' if (UseCapturedNS_ and self.RequestType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRequestType>%s</%sRequestType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(RequestType_), input_name='RequestType')), namespaceprefix_ , eol_))
+        if self.FirstName is not None:
+            namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFirstName>%s</%sFirstName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_))
+        if self.LastName is not None:
+            namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sLastName>%s</%sLastName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_))
+        if self.Email1 is not None:
+            namespaceprefix_ = self.Email1_nsprefix_ + ':' if (UseCapturedNS_ and self.Email1_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmail1>%s</%sEmail1>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email1), input_name='Email1')), namespaceprefix_ , eol_))
+        if self.Email2 is not None:
+            namespaceprefix_ = self.Email2_nsprefix_ + ':' if (UseCapturedNS_ and self.Email2_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmail2>%s</%sEmail2>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email2), input_name='Email2')), namespaceprefix_ , eol_))
+        if self.Email3 is not None:
+            namespaceprefix_ = self.Email3_nsprefix_ + ':' if (UseCapturedNS_ and self.Email3_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmail3>%s</%sEmail3>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email3), input_name='Email3')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        value = find_attr_value_('USERID', node)
+        if value is not None and 'USERID' not in already_processed:
+
already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackId': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackId') + value_ = self.gds_validate_string(value_, node, 'TrackId') + self.TrackId = value_ + self.TrackId_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientIp': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClientIp') + value_ = self.gds_validate_string(value_, node, 'ClientIp') + self.ClientIp = value_ + self.ClientIp_nsprefix_ = child_.prefix + elif nodeName_ == 'MpSuffix' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MpSuffix') + ival_ = self.gds_validate_integer(ival_, node, 'MpSuffix') + self.MpSuffix = ival_ + self.MpSuffix_nsprefix_ = child_.prefix + elif nodeName_ == 'MpDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MpDate') + value_ = self.gds_validate_string(value_, node, 'MpDate') + self.MpDate = value_ + self.MpDate_nsprefix_ = child_.prefix + elif nodeName_ == 'RequestType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RequestType') + value_ = self.gds_validate_string(value_, node, 'RequestType') + self.RequestType.append(value_) + self.RequestType_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'Email1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email1') + value_ = self.gds_validate_string(value_, node, 'Email1') + self.Email1 = value_ + self.Email1_nsprefix_ = child_.prefix + elif nodeName_ == 'Email2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email2') + value_ = self.gds_validate_string(value_, node, 'Email2') + self.Email2 = value_ + self.Email2_nsprefix_ = child_.prefix + elif nodeName_ == 'Email3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email3') + value_ = self.gds_validate_string(value_, node, 'Email3') + self.Email3 = value_ + self.Email3_nsprefix_ = child_.prefix +# end class PTSEmailRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from pts_email_request import *\n\n') + sys.stdout.write('import pts_email_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSEmailRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/pts_emailresult.py b/modules/connectors/usps/karrio/schemas/usps/pts_emailresult.py new file mode 100644 index 0000000000..fd9ba61c26 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/pts_emailresult.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:43 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/pts_emailresult.py') +# +# Command line arguments: +# ./schemas/PTSEmailResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/pts_emailresult.py" ./schemas/PTSEmailResult.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSEMAILRESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSEMAILRESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSEMAILRESULT.subclass: + return PTSEMAILRESULT.subclass(*args_, **kwargs_) + else: + return PTSEMAILRESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEMAILRESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSEMAILRESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSEMAILRESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSEMAILRESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSEMAILRESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSEMAILRESULT'): + pass + 
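+    # Illustrative usage sketch (not part of the generateDS.py output): the
+    # PTSEMAILRESULT class is a plain data holder, so a PTS e-mail result can
+    # be constructed with illustrative values and serialized directly,
+    # assuming the standard generateDS export behaviour:
+    #
+    #     import sys
+    #     result = PTSEMAILRESULT(ResultText='Delivered', ReturnCode=0)
+    #     result.export(sys.stdout, 0, name_='PTSEMAILRESULT')
+    #
+    # which is expected to emit a <PTSEMAILRESULT> element with <ResultText>
+    # and <ReturnCode> children.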
def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEMAILRESULT', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.ResultText is not None:
+            namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sResultText>%s</%sResultText>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_))
+        if self.ReturnCode is not None:
+            namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sReturnCode>%s</%sReturnCode>%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ResultText':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ResultText')
+            value_ = self.gds_validate_string(value_, node, 'ResultText')
+            self.ResultText = value_
+            self.ResultText_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ReturnCode' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode')
+            ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode')
+            self.ReturnCode = ival_
+            self.ReturnCode_nsprefix_ = child_.prefix
+# end class PTSEMAILRESULT
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from pts_emailresult import *\n\n') + sys.stdout.write('import pts_emailresult as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSEMAILRESULT" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/ptspod_result.py b/modules/connectors/usps/karrio/schemas/usps/ptspod_result.py new file mode 100644 index 0000000000..c8d29ded67 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/ptspod_result.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:43 2024 by generateDS.py version 2.43.3. 
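+#
+# Illustrative usage sketch (not part of the generateDS.py output): like the
+# other generated USPS schema modules, this module exposes parse(),
+# parseString() and parseEtree() helpers for loading a response payload,
+# e.g. (module path assumed from the layout introduced in this patch):
+#
+#     from karrio.schemas.usps import ptspod_result
+#     obj = ptspod_result.parseString(xml_text, silence=True)
+#     print(obj.get_ReturnCode(), obj.get_ResultText())
+#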
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/ptspod_result.py') +# +# Command line arguments: +# ./schemas/PTSPODResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/ptspod_result.py" ./schemas/PTSPODResult.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
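+#
+# Illustrative sketch (not part of the generateDS.py output): the
+# GeneratedsSuper date/time helpers defined above round-trip ISO-8601 values,
+# including the 'Z' and signed-offset timezone forms, e.g.:
+#
+#     dt = GeneratedsSuper.gds_parse_datetime('2024-04-03T21:08:43Z')
+#     GeneratedsSuper().gds_format_datetime(dt)  # -> '2024-04-03T21:08:43Z'
+#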
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSPODRESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSPODRESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSPODRESULT.subclass: + return PTSPODRESULT.subclass(*args_, **kwargs_) + else: + return PTSPODRESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSPODRESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSPODRESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSPODRESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSPODRESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSPODRESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSPODRESULT'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSPODRESULT', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.ResultText is not None:
+            namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sResultText>%s</%sResultText>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_))
+        if self.ReturnCode is not None:
+            namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sReturnCode>%s</%sReturnCode>%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ResultText':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ResultText')
+            value_ = self.gds_validate_string(value_, node, 'ResultText')
+            self.ResultText = value_
+            self.ResultText_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ReturnCode' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode')
+            ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode')
+            self.ReturnCode = ival_
+            self.ReturnCode_nsprefix_ = child_.prefix
+# end class PTSPODRESULT
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptspod_result import *\n\n') + sys.stdout.write('import ptspod_result as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSPODRESULT" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/ptsrre_result.py b/modules/connectors/usps/karrio/schemas/usps/ptsrre_result.py new file mode 100644 index 0000000000..ff4fd9892d --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/ptsrre_result.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:43 2024 by generateDS.py version 2.43.3. 
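+#
+# Illustrative usage sketch (not part of the generateDS.py output): when run
+# as a script, a module generated this way parses the XML file given as its
+# single argument and re-exports it to stdout (the file name below is a
+# placeholder):
+#
+#     python ptsrre_result.py PTSRREResult-response.xml
+#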
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/ptsrre_result.py') +# +# Command line arguments: +# ./schemas/PTSRREResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/ptsrre_result.py" ./schemas/PTSRREResult.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSRRERESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSRRERESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSRRERESULT.subclass: + return PTSRRERESULT.subclass(*args_, **kwargs_) + else: + return PTSRRERESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSRRERESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSRRERESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSRRERESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSRRERESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSRRERESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSRRERESULT'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSRRERESULT', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ResultText is not None: + namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sResultText>%s</%sResultText>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_)) + if self.ReturnCode is not None: + namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnCode>%s</%sReturnCode>%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ResultText': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ResultText') + value_ = self.gds_validate_string(value_, node, 'ResultText') + self.ResultText = value_ + self.ResultText_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode') + ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode') + self.ReturnCode = ival_ + self.ReturnCode_nsprefix_ = child_.prefix +# end class PTSRRERESULT + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python <Parser>.py [ -s ] <in_xml_file> +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptsrre_result import *\n\n') + sys.stdout.write('import ptsrre_result as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSRRERESULT" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/ptstpod_request.py b/modules/connectors/usps/karrio/schemas/usps/ptstpod_request.py new file mode 100644 index 0000000000..a3625b7097 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/ptstpod_request.py @@ -0,0 +1,1522 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:44 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/ptstpod_request.py') +# +# Command line arguments: +# ./schemas/PTSTPodRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/ptstpod_request.py" ./schemas/PTSTPodRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSTPodRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, TrackId=None, ClientIp=None, MpSuffix=None, MpDate=None, RequestType=None, FirstName=None, LastName=None, Email1=None, Email2=None, Email3=None, TableCode=None, CustRegID=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.TrackId = TrackId + self.TrackId_nsprefix_ = None + self.ClientIp = ClientIp + self.ClientIp_nsprefix_ = None + self.MpSuffix = MpSuffix + self.MpSuffix_nsprefix_ = None + self.MpDate = MpDate + self.MpDate_nsprefix_ = None + self.RequestType = RequestType + self.RequestType_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.Email1 = Email1 + self.Email1_nsprefix_ = None + self.Email2 = Email2 + self.Email2_nsprefix_ = None + self.Email3 = Email3 + self.Email3_nsprefix_ = None + self.TableCode = TableCode + self.TableCode_nsprefix_ = None + self.CustRegID = CustRegID + self.CustRegID_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSTPodRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSTPodRequest.subclass: + return PTSTPodRequest.subclass(*args_, **kwargs_) + else: + return PTSTPodRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackId(self): + return self.TrackId + def set_TrackId(self, TrackId): + self.TrackId = TrackId + def get_ClientIp(self): + return self.ClientIp + def set_ClientIp(self, ClientIp): + self.ClientIp = ClientIp + def get_MpSuffix(self): + return self.MpSuffix + def set_MpSuffix(self, MpSuffix): + self.MpSuffix = MpSuffix + def get_MpDate(self): + return self.MpDate + def set_MpDate(self, MpDate): + self.MpDate = MpDate + def get_RequestType(self): + return self.RequestType + def set_RequestType(self, RequestType): + self.RequestType = RequestType + def get_FirstName(self): + return self.FirstName + def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return 
self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_Email1(self): + return self.Email1 + def set_Email1(self, Email1): + self.Email1 = Email1 + def get_Email2(self): + return self.Email2 + def set_Email2(self, Email2): + self.Email2 = Email2 + def get_Email3(self): + return self.Email3 + def set_Email3(self, Email3): + self.Email3 = Email3 + def get_TableCode(self): + return self.TableCode + def set_TableCode(self, TableCode): + self.TableCode = TableCode + def get_CustRegID(self): + return self.CustRegID + def set_CustRegID(self, CustRegID): + self.CustRegID = CustRegID + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.TrackId is not None or + self.ClientIp is not None or + self.MpSuffix is not None or + self.MpDate is not None or + self.RequestType is not None or + self.FirstName is not None or + self.LastName is not None or + self.Email1 is not None or + self.Email2 is not None or + self.Email3 is not None or + self.TableCode is not None or + self.CustRegID is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPodRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSTPodRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSTPodRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSTPodRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSTPodRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSTPodRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPodRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TrackId is not None: + namespaceprefix_ = self.TrackId_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackId_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTrackId>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackId), input_name='TrackId')), namespaceprefix_ , eol_)) + if self.ClientIp is not None: + namespaceprefix_ = 
self.ClientIp_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientIp_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientIp>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClientIp), input_name='ClientIp')), namespaceprefix_ , eol_)) + if self.MpSuffix is not None: + namespaceprefix_ = self.MpSuffix_nsprefix_ + ':' if (UseCapturedNS_ and self.MpSuffix_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMpSuffix>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MpSuffix, input_name='MpSuffix'), namespaceprefix_ , eol_)) + if self.MpDate is not None: + namespaceprefix_ = self.MpDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MpDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMpDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MpDate), input_name='MpDate')), namespaceprefix_ , eol_)) + if self.RequestType is not None: + namespaceprefix_ = self.RequestType_nsprefix_ + ':' if (UseCapturedNS_ and self.RequestType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRequestType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RequestType), input_name='RequestType')), namespaceprefix_ , eol_)) + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.Email1 is not None: + namespaceprefix_ = self.Email1_nsprefix_ + ':' if (UseCapturedNS_ and self.Email1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email1), input_name='Email1')), namespaceprefix_ , eol_)) + if self.Email2 is not None: + namespaceprefix_ = self.Email2_nsprefix_ + ':' if (UseCapturedNS_ and self.Email2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email2), input_name='Email2')), namespaceprefix_ , eol_)) + if self.Email3 is not None: + namespaceprefix_ = self.Email3_nsprefix_ + ':' if (UseCapturedNS_ and self.Email3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email3), input_name='Email3')), namespaceprefix_ , eol_)) + if self.TableCode is not None: + namespaceprefix_ = self.TableCode_nsprefix_ + ':' if (UseCapturedNS_ and self.TableCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTableCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TableCode), input_name='TableCode')), namespaceprefix_ , eol_)) + if self.CustRegID is not None: + namespaceprefix_ = self.CustRegID_nsprefix_ + ':' if 
(UseCapturedNS_ and self.CustRegID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustRegID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CustRegID, input_name='CustRegID'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackId': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackId') + value_ = self.gds_validate_string(value_, node, 'TrackId') + self.TrackId = value_ + self.TrackId_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientIp': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClientIp') + value_ = self.gds_validate_string(value_, node, 'ClientIp') + self.ClientIp = value_ + self.ClientIp_nsprefix_ = child_.prefix + elif nodeName_ == 'MpSuffix' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MpSuffix') + ival_ = self.gds_validate_integer(ival_, node, 'MpSuffix') + self.MpSuffix = ival_ + self.MpSuffix_nsprefix_ = child_.prefix + elif nodeName_ == 'MpDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MpDate') + value_ = self.gds_validate_string(value_, node, 'MpDate') + self.MpDate = value_ + self.MpDate_nsprefix_ = child_.prefix + elif nodeName_ == 'RequestType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RequestType') + value_ = self.gds_validate_string(value_, node, 'RequestType') + self.RequestType = value_ + self.RequestType_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'Email1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email1') + value_ = self.gds_validate_string(value_, node, 'Email1') + self.Email1 = value_ + self.Email1_nsprefix_ = child_.prefix + elif nodeName_ == 'Email2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email2') + value_ = self.gds_validate_string(value_, node, 'Email2') + self.Email2 = value_ + self.Email2_nsprefix_ = child_.prefix + elif nodeName_ == 'Email3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email3') + value_ = self.gds_validate_string(value_, node, 'Email3') + self.Email3 = value_ + 
self.Email3_nsprefix_ = child_.prefix + elif nodeName_ == 'TableCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TableCode') + value_ = self.gds_validate_string(value_, node, 'TableCode') + self.TableCode = value_ + self.TableCode_nsprefix_ = child_.prefix + elif nodeName_ == 'CustRegID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'CustRegID') + ival_ = self.gds_validate_integer(ival_, node, 'CustRegID') + self.CustRegID = ival_ + self.CustRegID_nsprefix_ = child_.prefix +# end class PTSTPodRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptstpod_request import *\n\n') + sys.stdout.write('import ptstpod_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
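For orientation, a minimal usage sketch of the generated PTSTPodRequest class above: construct an instance, then serialize it with export(). Only the class, attribute, and method names come from the generated code; the import path is inferred from the file location in this patch, and the credential and tracking values are placeholders.

import io
import karrio.schemas.usps.ptstpod_request as ptstpod  # path assumed from modules/connectors/usps/karrio/schemas/usps/

request = ptstpod.PTSTPodRequest(
    USERID="XXXXXXXX",                 # placeholder USERID credential (exported as an XML attribute)
    TrackId="9400100000000000000000",  # placeholder tracking number
    RequestType="POD",                 # placeholder request type
    Email1="shipper@example.com",
)

buffer = io.StringIO()
request.export(buffer, 0, name_="PTSTPodRequest")  # writes <PTSTPodRequest USERID="...">...</PTSTPodRequest>
print(buffer.getvalue())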
+NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSTPodRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/ptstpod_result.py b/modules/connectors/usps/karrio/schemas/usps/ptstpod_result.py new file mode 100644 index 0000000000..af16089d40 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/ptstpod_result.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:44 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/ptstpod_result.py') +# +# Command line arguments: +# ./schemas/PTSTPODResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/ptstpod_result.py" ./schemas/PTSTPODResult.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSTPODRESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSTPODRESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSTPODRESULT.subclass: + return PTSTPODRESULT.subclass(*args_, **kwargs_) + else: + return PTSTPODRESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPODRESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSTPODRESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSTPODRESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSTPODRESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSTPODRESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSTPODRESULT'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPODRESULT', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ResultText is not None: + namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sResultText>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_)) + if self.ReturnCode is not None: + namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ResultText': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ResultText') + value_ = self.gds_validate_string(value_, node, 'ResultText') + self.ResultText = value_ + self.ResultText_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode') + ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode') + self.ReturnCode = ival_ + self.ReturnCode_nsprefix_ = child_.prefix +# end class PTSTPODRESULT + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptstpod_result import *\n\n') + sys.stdout.write('import ptstpod_result as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSTPODRESULT" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/rate_v4_request.py b/modules/connectors/usps/karrio/schemas/usps/rate_v4_request.py new file mode 100644 index 0000000000..22def8994c --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/rate_v4_request.py @@ -0,0 +1,2216 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:44 2024 by generateDS.py version 2.43.3. 
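+#
+# Usage sketch (illustrative only; not produced by generateDS, and the
+# element values below are placeholders): the RateV4Request / PackageType
+# classes generated in this module are typically populated and serialized
+# along these lines:
+#
+#   request = RateV4Request(USERID="XXXXXXXX", Revision="2")
+#   request.add_Package(PackageType(
+#       ID="0", Service="PRIORITY", ZipOrigination="44106",
+#       ZipDestination="20770", Pounds=1, Ounces=decimal_.Decimal("8"),
+#       Size="REGULAR", Machinable=True))
+#   request.export(sys.stdout, 0, name_='RateV4Request')
+#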
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/rate_v4_request.py') +# +# Command line arguments: +# ./schemas/RateV4Request.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/rate_v4_request.py" ./schemas/RateV4Request.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
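+#
+# Illustrative note (not part of the generateDS output): quote_xml() and
+# quote_attrib() below escape XML metacharacters into character entities
+# ('&' -> '&amp;', '<' -> '&lt;', '>' -> '&gt;', plus '"' -> '&quot;' inside
+# double-quoted attribute values), while any <![CDATA[ ... ]]> section matched
+# by CDATA_pattern_ is passed through quote_xml() unescaped. For example:
+#
+#   quote_xml('Fragile & <handle> with care')
+#   # -> 'Fragile &amp; &lt;handle&gt; with care'
+#   quote_attrib('5" x 7"')
+#   # -> '"5&quot; x 7&quot;"'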
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class RateV4Request(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Revision=None, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RateV4Request) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RateV4Request.subclass: + return RateV4Request.subclass(*args_, **kwargs_) + else: + return RateV4Request(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Revision is not None or + self.Package + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Request', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RateV4Request') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'RateV4Request': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + 
already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RateV4Request') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RateV4Request', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RateV4Request'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Request', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' +# end class RateV4Request + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Service=None, FirstClassMailType=None, ZipOrigination=None, ZipDestination=None, Pounds=None, Ounces=None, Container=None, Size=None, Width=None, Length=None, Height=None, 
Girth=None, Value=None, AmountToCollect=None, SpecialServices=None, Content=None, GroundOnly=None, SortBy=None, Machinable=None, ReturnLocations=None, ReturnServiceInfo=None, DropOffTime=None, ShipDate=None, ReturnDimensionalWeight=None, TrackingRetentionPeriod=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Service = Service + self.Service_nsprefix_ = None + self.FirstClassMailType = FirstClassMailType + self.FirstClassMailType_nsprefix_ = None + self.ZipOrigination = ZipOrigination + self.ZipOrigination_nsprefix_ = None + self.ZipDestination = ZipDestination + self.ZipDestination_nsprefix_ = None + self.Pounds = Pounds + self.validate_PoundsType(self.Pounds) + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.validate_OuncesType(self.Ounces) + self.Ounces_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Size = Size + self.Size_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.AmountToCollect = AmountToCollect + self.AmountToCollect_nsprefix_ = None + self.SpecialServices = SpecialServices + self.SpecialServices_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + self.GroundOnly = GroundOnly + self.GroundOnly_nsprefix_ = None + self.SortBy = SortBy + self.SortBy_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.ReturnLocations = ReturnLocations + self.ReturnLocations_nsprefix_ = None + self.ReturnServiceInfo = ReturnServiceInfo + self.ReturnServiceInfo_nsprefix_ = None + self.DropOffTime = DropOffTime + self.DropOffTime_nsprefix_ = None + self.ShipDate = ShipDate + self.ShipDate_nsprefix_ = None + self.ReturnDimensionalWeight = ReturnDimensionalWeight + self.ReturnDimensionalWeight_nsprefix_ = None + self.TrackingRetentionPeriod = TrackingRetentionPeriod + self.TrackingRetentionPeriod_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def get_FirstClassMailType(self): + return self.FirstClassMailType + def set_FirstClassMailType(self, FirstClassMailType): + self.FirstClassMailType = FirstClassMailType + def get_ZipOrigination(self): + return self.ZipOrigination + def set_ZipOrigination(self, ZipOrigination): + self.ZipOrigination = ZipOrigination + def get_ZipDestination(self): + return self.ZipDestination + def set_ZipDestination(self, ZipDestination): + self.ZipDestination = ZipDestination + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, 
Ounces): + self.Ounces = Ounces + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Size(self): + return self.Size + def set_Size(self, Size): + self.Size = Size + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_AmountToCollect(self): + return self.AmountToCollect + def set_AmountToCollect(self, AmountToCollect): + self.AmountToCollect = AmountToCollect + def get_SpecialServices(self): + return self.SpecialServices + def set_SpecialServices(self, SpecialServices): + self.SpecialServices = SpecialServices + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_GroundOnly(self): + return self.GroundOnly + def set_GroundOnly(self, GroundOnly): + self.GroundOnly = GroundOnly + def get_SortBy(self): + return self.SortBy + def set_SortBy(self, SortBy): + self.SortBy = SortBy + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_ReturnLocations(self): + return self.ReturnLocations + def set_ReturnLocations(self, ReturnLocations): + self.ReturnLocations = ReturnLocations + def get_ReturnServiceInfo(self): + return self.ReturnServiceInfo + def set_ReturnServiceInfo(self, ReturnServiceInfo): + self.ReturnServiceInfo = ReturnServiceInfo + def get_DropOffTime(self): + return self.DropOffTime + def set_DropOffTime(self, DropOffTime): + self.DropOffTime = DropOffTime + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_ReturnDimensionalWeight(self): + return self.ReturnDimensionalWeight + def set_ReturnDimensionalWeight(self, ReturnDimensionalWeight): + self.ReturnDimensionalWeight = ReturnDimensionalWeight + def get_TrackingRetentionPeriod(self): + return self.TrackingRetentionPeriod + def set_TrackingRetentionPeriod(self, TrackingRetentionPeriod): + self.TrackingRetentionPeriod = TrackingRetentionPeriod + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def validate_PoundsType(self, value): + result = True + # Validate type PoundsType, a restriction on xs:integer. 
+ if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False + if value < 0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + if value > 70: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + return result + def validate_OuncesType(self, value): + result = True + # Validate type OuncesType, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, decimal_.Decimal): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (decimal_.Decimal)' % {"value": value, "lineno": lineno, }) + return False + if value < 0.0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on OuncesType' % {"value": value, "lineno": lineno} ) + result = False + if value > 1120.0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on OuncesType' % {"value": value, "lineno": lineno} ) + result = False + return result + def has__content(self): + if ( + self.Service is not None or + self.FirstClassMailType is not None or + self.ZipOrigination is not None or + self.ZipDestination is not None or + self.Pounds is not None or + self.Ounces is not None or + self.Container is not None or + self.Size is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Value is not None or + self.AmountToCollect is not None or + self.SpecialServices is not None or + self.Content is not None or + self.GroundOnly is not None or + self.SortBy is not None or + self.Machinable is not None or + self.ReturnLocations is not None or + self.ReturnServiceInfo is not None or + self.DropOffTime is not None or + self.ShipDate is not None or + self.ReturnDimensionalWeight is not None or + self.TrackingRetentionPeriod is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Service is not None: + namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Service), input_name='Service')), namespaceprefix_ , eol_)) + if self.FirstClassMailType is not None: + namespaceprefix_ = self.FirstClassMailType_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstClassMailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstClassMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstClassMailType), input_name='FirstClassMailType')), namespaceprefix_ , eol_)) + if self.ZipOrigination is not None: + namespaceprefix_ = self.ZipOrigination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipOrigination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipOrigination>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipOrigination), input_name='ZipOrigination')), namespaceprefix_ , eol_)) + if self.ZipDestination is not None: + namespaceprefix_ = self.ZipDestination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipDestination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipDestination>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipDestination), input_name='ZipDestination')), namespaceprefix_ , eol_)) + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Pounds, input_name='Pounds'), namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Size is not None: + namespaceprefix_ = self.Size_nsprefix_ + ':' if (UseCapturedNS_ and self.Size_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSize>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Size), 
input_name='Size')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Value), input_name='Value')), namespaceprefix_ , eol_)) + if self.AmountToCollect is not None: + namespaceprefix_ = self.AmountToCollect_nsprefix_ + ':' if (UseCapturedNS_ and self.AmountToCollect_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAmountToCollect>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AmountToCollect), input_name='AmountToCollect')), namespaceprefix_ , eol_)) + if self.SpecialServices is not None: + namespaceprefix_ = self.SpecialServices_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialServices_nsprefix_) else '' + self.SpecialServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpecialServices', pretty_print=pretty_print) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + if self.GroundOnly is not None: + namespaceprefix_ = self.GroundOnly_nsprefix_ + ':' if (UseCapturedNS_ and self.GroundOnly_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGroundOnly>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.GroundOnly, input_name='GroundOnly'), namespaceprefix_ , eol_)) + if self.SortBy is not None: + namespaceprefix_ = self.SortBy_nsprefix_ + ':' if (UseCapturedNS_ and self.SortBy_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSortBy>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SortBy), input_name='SortBy')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, 
input_name='Machinable'), namespaceprefix_ , eol_)) + if self.ReturnLocations is not None: + namespaceprefix_ = self.ReturnLocations_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnLocations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnLocations>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ReturnLocations, input_name='ReturnLocations'), namespaceprefix_ , eol_)) + if self.ReturnServiceInfo is not None: + namespaceprefix_ = self.ReturnServiceInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnServiceInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnServiceInfo>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ReturnServiceInfo, input_name='ReturnServiceInfo'), namespaceprefix_ , eol_)) + if self.DropOffTime is not None: + namespaceprefix_ = self.DropOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.DropOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDropOffTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DropOffTime), input_name='DropOffTime')), namespaceprefix_ , eol_)) + if self.ShipDate is not None: + namespaceprefix_ = self.ShipDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipDate_nsprefix_) else '' + self.ShipDate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShipDate', pretty_print=pretty_print) + if self.ReturnDimensionalWeight is not None: + namespaceprefix_ = self.ReturnDimensionalWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnDimensionalWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnDimensionalWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReturnDimensionalWeight), input_name='ReturnDimensionalWeight')), namespaceprefix_ , eol_)) + if self.TrackingRetentionPeriod is not None: + namespaceprefix_ = self.TrackingRetentionPeriod_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackingRetentionPeriod_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTrackingRetentionPeriod>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackingRetentionPeriod), input_name='TrackingRetentionPeriod')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Service': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Service') + value_ = self.gds_validate_string(value_, node, 'Service') + self.Service = value_ + self.Service_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstClassMailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstClassMailType') + value_ = self.gds_validate_string(value_, node, 'FirstClassMailType') + self.FirstClassMailType = value_ + 
self.FirstClassMailType_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipOrigination': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZipOrigination') + value_ = self.gds_validate_string(value_, node, 'ZipOrigination') + self.ZipOrigination = value_ + self.ZipOrigination_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipDestination': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZipDestination') + value_ = self.gds_validate_string(value_, node, 'ZipDestination') + self.ZipDestination = value_ + self.ZipDestination_nsprefix_ = child_.prefix + elif nodeName_ == 'Pounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Pounds') + ival_ = self.gds_validate_integer(ival_, node, 'Pounds') + self.Pounds = ival_ + self.Pounds_nsprefix_ = child_.prefix + # validate type PoundsType + self.validate_PoundsType(self.Pounds) + elif nodeName_ == 'Ounces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Ounces') + fval_ = self.gds_validate_decimal(fval_, node, 'Ounces') + self.Ounces = fval_ + self.Ounces_nsprefix_ = child_.prefix + # validate type OuncesType + self.validate_OuncesType(self.Ounces) + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Size': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Size') + value_ = self.gds_validate_string(value_, node, 'Size') + self.Size = value_ + self.Size_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Value': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Value') + value_ = self.gds_validate_string(value_, node, 'Value') + self.Value = value_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'AmountToCollect': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AmountToCollect') + value_ = self.gds_validate_string(value_, node, 'AmountToCollect') + self.AmountToCollect = value_ + self.AmountToCollect_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialServices': + obj_ = SpecialServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SpecialServices = obj_ + obj_.original_tagname_ = 'SpecialServices' + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + 
obj_.original_tagname_ = 'Content' + elif nodeName_ == 'GroundOnly': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'GroundOnly') + ival_ = self.gds_validate_boolean(ival_, node, 'GroundOnly') + self.GroundOnly = ival_ + self.GroundOnly_nsprefix_ = child_.prefix + elif nodeName_ == 'SortBy': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SortBy') + value_ = self.gds_validate_string(value_, node, 'SortBy') + self.SortBy = value_ + self.SortBy_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnLocations': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ReturnLocations') + ival_ = self.gds_validate_boolean(ival_, node, 'ReturnLocations') + self.ReturnLocations = ival_ + self.ReturnLocations_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnServiceInfo': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ReturnServiceInfo') + ival_ = self.gds_validate_boolean(ival_, node, 'ReturnServiceInfo') + self.ReturnServiceInfo = ival_ + self.ReturnServiceInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'DropOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DropOffTime') + value_ = self.gds_validate_string(value_, node, 'DropOffTime') + self.DropOffTime = value_ + self.DropOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'ShipDate': + obj_ = ShipDateType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShipDate = obj_ + obj_.original_tagname_ = 'ShipDate' + elif nodeName_ == 'ReturnDimensionalWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReturnDimensionalWeight') + value_ = self.gds_validate_string(value_, node, 'ReturnDimensionalWeight') + self.ReturnDimensionalWeight = value_ + self.ReturnDimensionalWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'TrackingRetentionPeriod': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackingRetentionPeriod') + value_ = self.gds_validate_string(value_, node, 'TrackingRetentionPeriod') + self.TrackingRetentionPeriod = value_ + self.TrackingRetentionPeriod_nsprefix_ = child_.prefix +# end class PackageType + + +class SpecialServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SpecialService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if SpecialService is None: + self.SpecialService = [] + else: + self.SpecialService = SpecialService + self.SpecialService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecialServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SpecialServicesType.subclass: + return SpecialServicesType.subclass(*args_, **kwargs_) + else: + return SpecialServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SpecialService(self): + return self.SpecialService 
+ def set_SpecialService(self, SpecialService): + self.SpecialService = SpecialService + def add_SpecialService(self, value): + self.SpecialService.append(value) + def insert_SpecialService_at(self, index, value): + self.SpecialService.insert(index, value) + def replace_SpecialService_at(self, index, value): + self.SpecialService[index] = value + def has__content(self): + if ( + self.SpecialService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecialServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SpecialServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecialServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecialServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecialServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for SpecialService_ in self.SpecialService: + namespaceprefix_ = self.SpecialService_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(SpecialService_), input_name='SpecialService')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SpecialService': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialService') + value_ = self.gds_validate_string(value_, node, 'SpecialService') + self.SpecialService.append(value_) + self.SpecialService_nsprefix_ = child_.prefix +# end class SpecialServicesType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + 
self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + 
self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentDescription') + value_ = self.gds_validate_string(value_, node, 'ContentDescription') + self.ContentDescription = value_ + self.ContentDescription_nsprefix_ = child_.prefix +# end class ContentType + + +class ShipDateType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Option=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Option = _cast(None, Option) + self.Option_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShipDateType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShipDateType.subclass: + return ShipDateType.subclass(*args_, **kwargs_) + else: + return ShipDateType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipDateType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShipDateType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShipDateType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShipDateType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShipDateType'): + if self.Option is not None and 'Option' not in already_processed: + already_processed.add('Option') + 
outfile.write(' Option=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Option), input_name='Option')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipDateType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Option', node) + if value is not None and 'Option' not in already_processed: + already_processed.add('Option') + self.Option = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class ShipDateType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + 
rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from rate_v4_request import *\n\n') + sys.stdout.write('import rate_v4_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "PackageType", + "RateV4Request", + "ShipDateType", + "SpecialServicesType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/rate_v4_response.py b/modules/connectors/usps/karrio/schemas/usps/rate_v4_response.py new file mode 100644 index 0000000000..57f35c58b1 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/rate_v4_response.py @@ -0,0 +1,2719 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:45 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/rate_v4_response.py') +# +# Command line arguments: +# ./schemas/RateV4Response.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/rate_v4_response.py" ./schemas/RateV4Response.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
+            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
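+            # (In this method, CategoryText writes the captured character data
+            # directly, CategorySimple defers to exportSimple(), which wraps the
+            # value in <name>...</name> tags, and CategoryComplex delegates to
+            # the child object's own export().)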
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class RateV4Response(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Package=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RateV4Response) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RateV4Response.subclass: + return RateV4Response.subclass(*args_, **kwargs_) + else: + return RateV4Response(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.Package or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Response', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RateV4Response') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'RateV4Response': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='RateV4Response') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RateV4Response'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Response', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Package', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Package'): + self.add_Package(obj_.value) + elif hasattr(self, 'set_Package'): + self.set_Package(obj_.value) + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class RateV4Response + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Error=None, ZipOrigination=None, ZipDestination=None, Pounds=None, Ounces=None, FirstClassMailType=None, Container=None, Size=None, Width=None, Length=None, Height=None, Girth=None, Machinable=None, Zone=None, Postage=None, Restriction=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Error = Error + self.Error_nsprefix_ = None + self.ZipOrigination = ZipOrigination + self.ZipOrigination_nsprefix_ = None + self.ZipDestination = ZipDestination + self.ZipDestination_nsprefix_ = None + self.Pounds = Pounds + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.Ounces_nsprefix_ = None + self.FirstClassMailType = FirstClassMailType + self.FirstClassMailType_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + 
self.Size = Size + self.Size_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.Zone = Zone + self.Zone_nsprefix_ = None + if Postage is None: + self.Postage = [] + else: + self.Postage = Postage + self.Postage_nsprefix_ = None + self.Restriction = Restriction + self.Restriction_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Error(self): + return self.Error + def set_Error(self, Error): + self.Error = Error + def get_ZipOrigination(self): + return self.ZipOrigination + def set_ZipOrigination(self, ZipOrigination): + self.ZipOrigination = ZipOrigination + def get_ZipDestination(self): + return self.ZipDestination + def set_ZipDestination(self, ZipDestination): + self.ZipDestination = ZipDestination + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, Ounces): + self.Ounces = Ounces + def get_FirstClassMailType(self): + return self.FirstClassMailType + def set_FirstClassMailType(self, FirstClassMailType): + self.FirstClassMailType = FirstClassMailType + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Size(self): + return self.Size + def set_Size(self, Size): + self.Size = Size + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_Zone(self): + return self.Zone + def set_Zone(self, Zone): + self.Zone = Zone + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def add_Postage(self, value): + self.Postage.append(value) + def insert_Postage_at(self, index, value): + self.Postage.insert(index, value) + def replace_Postage_at(self, index, value): + self.Postage[index] = value + def get_Restriction(self): + return self.Restriction + def set_Restriction(self, Restriction): + self.Restriction = Restriction + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.Error is not None or + 
self.ZipOrigination is not None or + self.ZipDestination is not None or + self.Pounds is not None or + self.Ounces is not None or + self.FirstClassMailType is not None or + self.Container is not None or + self.Size is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Machinable is not None or + self.Zone is not None or + self.Postage or + self.Restriction is not None or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Error is not None: + namespaceprefix_ = self.Error_nsprefix_ + ':' if (UseCapturedNS_ and self.Error_nsprefix_) else '' + self.Error.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Error', pretty_print=pretty_print) + if self.ZipOrigination is not None: + namespaceprefix_ = self.ZipOrigination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipOrigination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipOrigination>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipOrigination), input_name='ZipOrigination')), namespaceprefix_ , eol_)) + if self.ZipDestination is not None: + namespaceprefix_ = self.ZipDestination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipDestination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipDestination>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipDestination), input_name='ZipDestination')), namespaceprefix_ , eol_)) + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Pounds, input_name='Pounds'), 
namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if self.FirstClassMailType is not None: + namespaceprefix_ = self.FirstClassMailType_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstClassMailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstClassMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstClassMailType), input_name='FirstClassMailType')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Size is not None: + namespaceprefix_ = self.Size_nsprefix_ + ':' if (UseCapturedNS_ and self.Size_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSize>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Size), input_name='Size')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Machinable), input_name='Machinable')), namespaceprefix_ , eol_)) + if self.Zone is not None: + namespaceprefix_ = self.Zone_nsprefix_ + ':' if (UseCapturedNS_ and self.Zone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZone>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zone, input_name='Zone'), namespaceprefix_ , eol_)) + for Postage_ in self.Postage: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + Postage_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='Postage', pretty_print=pretty_print) + if self.Restriction is not None: + namespaceprefix_ = self.Restriction_nsprefix_ + ':' if (UseCapturedNS_ and self.Restriction_nsprefix_) else '' + self.Restriction.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Restriction', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Error': + obj_ = ErrorType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Error', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Error'): + self.add_Error(obj_.value) + elif hasattr(self, 'set_Error'): + self.set_Error(obj_.value) + elif nodeName_ == 'ZipOrigination' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'ZipOrigination') + valuestr_ = self.gds_validate_string(valuestr_, node, 'ZipOrigination') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'ZipOrigination', valuestr_) + self.content_.append(obj_) + self.ZipOrigination_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipDestination' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'ZipDestination') + valuestr_ = self.gds_validate_string(valuestr_, node, 'ZipDestination') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'ZipDestination', valuestr_) + self.content_.append(obj_) + self.ZipDestination_nsprefix_ = child_.prefix + elif nodeName_ == 'Pounds' and child_.text is not None: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Pounds') + ival_ = self.gds_validate_integer(ival_, node, 'Pounds') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeInteger, 'Pounds', ival_) + self.content_.append(obj_) + self.Pounds_nsprefix_ = child_.prefix + elif nodeName_ == 'Ounces' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Ounces') + fval_ = self.gds_validate_decimal(fval_, node, 'Ounces') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Ounces', fval_) + self.content_.append(obj_) + self.Ounces_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstClassMailType' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'FirstClassMailType') + valuestr_ = self.gds_validate_string(valuestr_, node, 'FirstClassMailType') + obj_ = 
self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'FirstClassMailType', valuestr_) + self.content_.append(obj_) + self.FirstClassMailType_nsprefix_ = child_.prefix + elif nodeName_ == 'Container' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Container') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Container') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Container', valuestr_) + self.content_.append(obj_) + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Size' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Size') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Size') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Size', valuestr_) + self.content_.append(obj_) + self.Size_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Width', fval_) + self.content_.append(obj_) + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Length', fval_) + self.content_.append(obj_) + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Height', fval_) + self.content_.append(obj_) + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Girth', fval_) + self.content_.append(obj_) + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Machinable') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Machinable') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Machinable', valuestr_) + self.content_.append(obj_) + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'Zone' and child_.text is not None: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zone') + ival_ = self.gds_validate_integer(ival_, node, 'Zone') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeInteger, 'Zone', ival_) + self.content_.append(obj_) + self.Zone_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage': + obj_ = PostageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Postage', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Postage'): + self.add_Postage(obj_.value) + elif hasattr(self, 
'set_Postage'): + self.set_Postage(obj_.value) + elif nodeName_ == 'Restriction': + obj_ = RestrictionType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Restriction', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Restriction'): + self.add_Restriction(obj_.value) + elif hasattr(self, 'set_Restriction'): + self.set_Restriction(obj_.value) + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class PackageType + + +class ErrorType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Number=None, Source=None, Description=None, HelpFile=None, HelpContext=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Number = Number + self.Number_nsprefix_ = None + self.Source = Source + self.Source_nsprefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.HelpFile = HelpFile + self.HelpFile_nsprefix_ = None + self.HelpContext = HelpContext + self.HelpContext_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ErrorType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ErrorType.subclass: + return ErrorType.subclass(*args_, **kwargs_) + else: + return ErrorType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Number(self): + return self.Number + def set_Number(self, Number): + self.Number = Number + def get_Source(self): + return self.Source + def set_Source(self, Source): + self.Source = Source + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_HelpFile(self): + return self.HelpFile + def set_HelpFile(self, HelpFile): + self.HelpFile = HelpFile + def get_HelpContext(self): + return self.HelpContext + def set_HelpContext(self, HelpContext): + self.HelpContext = HelpContext + def has__content(self): + if ( + self.Number is not None or + self.Source is not None or + self.Description is not None or + self.HelpFile is not None or + self.HelpContext is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ErrorType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ErrorType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ErrorType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ErrorType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level 
+ 1, namespaceprefix_, namespacedef_, name_='ErrorType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ErrorType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ErrorType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Number is not None: + namespaceprefix_ = self.Number_nsprefix_ + ':' if (UseCapturedNS_ and self.Number_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumber>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Number, input_name='Number'), namespaceprefix_ , eol_)) + if self.Source is not None: + namespaceprefix_ = self.Source_nsprefix_ + ':' if (UseCapturedNS_ and self.Source_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSource>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Source), input_name='Source')), namespaceprefix_ , eol_)) + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.HelpFile is not None: + namespaceprefix_ = self.HelpFile_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpFile_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpFile>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HelpFile), input_name='HelpFile')), namespaceprefix_ , eol_)) + if self.HelpContext is not None: + namespaceprefix_ = self.HelpContext_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpContext_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpContext>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.HelpContext, input_name='HelpContext'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Number' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Number') + ival_ = self.gds_validate_integer(ival_, node, 'Number') + self.Number = ival_ + self.Number_nsprefix_ = child_.prefix + elif nodeName_ == 'Source': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Source') + value_ = self.gds_validate_string(value_, node, 'Source') + self.Source = value_ + self.Source_nsprefix_ = child_.prefix + elif nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = 
value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpFile': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HelpFile') + value_ = self.gds_validate_string(value_, node, 'HelpFile') + self.HelpFile = value_ + self.HelpFile_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpContext' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'HelpContext') + ival_ = self.gds_validate_integer(ival_, node, 'HelpContext') + self.HelpContext = ival_ + self.HelpContext_nsprefix_ = child_.prefix +# end class ErrorType + + +class PostageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CLASSID=None, MailService=None, Rate=None, CommercialRate=None, CommercialPlusRate=None, CommitmentDate=None, CommitmentName=None, MaxDimensions=None, ServiceInformation=None, SpecialServices=None, Zone=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CLASSID = _cast(int, CLASSID) + self.CLASSID_nsprefix_ = None + self.MailService = MailService + self.MailService_nsprefix_ = None + self.Rate = Rate + self.Rate_nsprefix_ = None + self.CommercialRate = CommercialRate + self.CommercialRate_nsprefix_ = None + self.CommercialPlusRate = CommercialPlusRate + self.CommercialPlusRate_nsprefix_ = None + self.CommitmentDate = CommitmentDate + self.CommitmentDate_nsprefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.MaxDimensions = MaxDimensions + self.MaxDimensions_nsprefix_ = None + self.ServiceInformation = ServiceInformation + self.ServiceInformation_nsprefix_ = None + self.SpecialServices = SpecialServices + self.SpecialServices_nsprefix_ = None + self.Zone = Zone + self.Zone_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PostageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PostageType.subclass: + return PostageType.subclass(*args_, **kwargs_) + else: + return PostageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailService(self): + return self.MailService + def set_MailService(self, MailService): + self.MailService = MailService + def get_Rate(self): + return self.Rate + def set_Rate(self, Rate): + self.Rate = Rate + def get_CommercialRate(self): + return self.CommercialRate + def set_CommercialRate(self, CommercialRate): + self.CommercialRate = CommercialRate + def get_CommercialPlusRate(self): + return self.CommercialPlusRate + def set_CommercialPlusRate(self, CommercialPlusRate): + self.CommercialPlusRate = CommercialPlusRate + def get_CommitmentDate(self): + return self.CommitmentDate + def set_CommitmentDate(self, CommitmentDate): + self.CommitmentDate = CommitmentDate + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName 
= CommitmentName + def get_MaxDimensions(self): + return self.MaxDimensions + def set_MaxDimensions(self, MaxDimensions): + self.MaxDimensions = MaxDimensions + def get_ServiceInformation(self): + return self.ServiceInformation + def set_ServiceInformation(self, ServiceInformation): + self.ServiceInformation = ServiceInformation + def get_SpecialServices(self): + return self.SpecialServices + def set_SpecialServices(self, SpecialServices): + self.SpecialServices = SpecialServices + def get_Zone(self): + return self.Zone + def set_Zone(self, Zone): + self.Zone = Zone + def get_CLASSID(self): + return self.CLASSID + def set_CLASSID(self, CLASSID): + self.CLASSID = CLASSID + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.MailService is not None or + self.Rate is not None or + self.CommercialRate is not None or + self.CommercialPlusRate is not None or + self.CommitmentDate is not None or + self.CommitmentName is not None or + self.MaxDimensions is not None or + self.ServiceInformation is not None or + self.SpecialServices is not None or + self.Zone is not None or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PostageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PostageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PostageType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PostageType'): + if self.CLASSID is not None and 'CLASSID' not in already_processed: + already_processed.add('CLASSID') + outfile.write(' CLASSID="%s"' % self.gds_format_integer(self.CLASSID, input_name='CLASSID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostageType', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailService is not None: + namespaceprefix_ = self.MailService_nsprefix_ + ':' if (UseCapturedNS_ and self.MailService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailService), input_name='MailService')), namespaceprefix_ , eol_)) + if self.Rate is not None: + namespaceprefix_ = self.Rate_nsprefix_ + ':' if (UseCapturedNS_ and self.Rate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRate>%s%s' % (namespaceprefix_ , 
self.gds_format_float(self.Rate, input_name='Rate'), namespaceprefix_ , eol_)) + if self.CommercialRate is not None: + namespaceprefix_ = self.CommercialRate_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialRate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialRate>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialRate, input_name='CommercialRate'), namespaceprefix_ , eol_)) + if self.CommercialPlusRate is not None: + namespaceprefix_ = self.CommercialPlusRate_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPlusRate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPlusRate>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialPlusRate, input_name='CommercialPlusRate'), namespaceprefix_ , eol_)) + if self.CommitmentDate is not None: + namespaceprefix_ = self.CommitmentDate_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentDate), input_name='CommitmentDate')), namespaceprefix_ , eol_)) + if self.CommitmentName is not None: + namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_)) + if self.MaxDimensions is not None: + namespaceprefix_ = self.MaxDimensions_nsprefix_ + ':' if (UseCapturedNS_ and self.MaxDimensions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMaxDimensions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MaxDimensions), input_name='MaxDimensions')), namespaceprefix_ , eol_)) + if self.ServiceInformation is not None: + namespaceprefix_ = self.ServiceInformation_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceInformation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceInformation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceInformation), input_name='ServiceInformation')), namespaceprefix_ , eol_)) + if self.SpecialServices is not None: + namespaceprefix_ = self.SpecialServices_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialServices_nsprefix_) else '' + self.SpecialServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpecialServices', pretty_print=pretty_print) + if self.Zone is not None: + namespaceprefix_ = self.Zone_nsprefix_ + ':' if (UseCapturedNS_ and self.Zone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zone), input_name='Zone')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('CLASSID', node) + if value is not None and 'CLASSID' not in already_processed: + already_processed.add('CLASSID') + self.CLASSID = self.gds_parse_integer(value, node, 'CLASSID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailService' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'MailService') + valuestr_ = self.gds_validate_string(valuestr_, node, 'MailService') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'MailService', valuestr_) + self.content_.append(obj_) + self.MailService_nsprefix_ = child_.prefix + elif nodeName_ == 'Rate' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Rate') + fval_ = self.gds_validate_float(fval_, node, 'Rate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Rate', fval_) + self.content_.append(obj_) + self.Rate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialRate' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialRate') + fval_ = self.gds_validate_float(fval_, node, 'CommercialRate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'CommercialRate', fval_) + self.content_.append(obj_) + self.CommercialRate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialPlusRate' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialPlusRate') + fval_ = self.gds_validate_float(fval_, node, 'CommercialPlusRate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'CommercialPlusRate', fval_) + self.content_.append(obj_) + self.CommercialPlusRate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentDate' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'CommitmentDate') + valuestr_ = self.gds_validate_string(valuestr_, node, 'CommitmentDate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'CommitmentDate', valuestr_) + self.content_.append(obj_) + self.CommitmentDate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentName' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'CommitmentName') + valuestr_ = self.gds_validate_string(valuestr_, node, 'CommitmentName') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'CommitmentName', valuestr_) + self.content_.append(obj_) + self.CommitmentName_nsprefix_ = child_.prefix + elif nodeName_ == 'MaxDimensions' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'MaxDimensions') + valuestr_ = self.gds_validate_string(valuestr_, node, 'MaxDimensions') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'MaxDimensions', valuestr_) + self.content_.append(obj_) + self.MaxDimensions_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceInformation' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'ServiceInformation') + valuestr_ = 
self.gds_validate_string(valuestr_, node, 'ServiceInformation') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'ServiceInformation', valuestr_) + self.content_.append(obj_) + self.ServiceInformation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialServices': + obj_ = SpecialServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'SpecialServices', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_SpecialServices'): + self.add_SpecialServices(obj_.value) + elif hasattr(self, 'set_SpecialServices'): + self.set_SpecialServices(obj_.value) + elif nodeName_ == 'Zone' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Zone') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Zone') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Zone', valuestr_) + self.content_.append(obj_) + self.Zone_nsprefix_ = child_.prefix + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class PostageType + + +class SpecialServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SpecialService=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if SpecialService is None: + self.SpecialService = [] + else: + self.SpecialService = SpecialService + self.SpecialService_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecialServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SpecialServicesType.subclass: + return SpecialServicesType.subclass(*args_, **kwargs_) + else: + return SpecialServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SpecialService(self): + return self.SpecialService + def set_SpecialService(self, SpecialService): + self.SpecialService = SpecialService + def add_SpecialService(self, value): + self.SpecialService.append(value) + def insert_SpecialService_at(self, index, value): + self.SpecialService.insert(index, value) + def replace_SpecialService_at(self, index, value): + self.SpecialService[index] = value + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.SpecialService or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecialServicesType') + if 
imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SpecialServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecialServicesType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecialServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for SpecialService_ in self.SpecialService: + namespaceprefix_ = self.SpecialService_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialService_nsprefix_) else '' + SpecialService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpecialService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SpecialService': + obj_ = SpecialServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'SpecialService', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_SpecialService'): + self.add_SpecialService(obj_.value) + elif hasattr(self, 'set_SpecialService'): + self.set_SpecialService(obj_.value) + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class SpecialServicesType + + +class SpecialServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Available=None, AvailableOnline=None, AvailableCPP=None, Price=None, PriceOnline=None, PriceCPP=None, DeclaredValueRequired=None, DueSenderRequired=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = 
kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Available = Available + self.Available_nsprefix_ = None + self.AvailableOnline = AvailableOnline + self.AvailableOnline_nsprefix_ = None + self.AvailableCPP = AvailableCPP + self.AvailableCPP_nsprefix_ = None + self.Price = Price + self.Price_nsprefix_ = None + self.PriceOnline = PriceOnline + self.PriceOnline_nsprefix_ = None + self.PriceCPP = PriceCPP + self.PriceCPP_nsprefix_ = None + self.DeclaredValueRequired = DeclaredValueRequired + self.DeclaredValueRequired_nsprefix_ = None + self.DueSenderRequired = DueSenderRequired + self.DueSenderRequired_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecialServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SpecialServiceType.subclass: + return SpecialServiceType.subclass(*args_, **kwargs_) + else: + return SpecialServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Available(self): + return self.Available + def set_Available(self, Available): + self.Available = Available + def get_AvailableOnline(self): + return self.AvailableOnline + def set_AvailableOnline(self, AvailableOnline): + self.AvailableOnline = AvailableOnline + def get_AvailableCPP(self): + return self.AvailableCPP + def set_AvailableCPP(self, AvailableCPP): + self.AvailableCPP = AvailableCPP + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def get_PriceOnline(self): + return self.PriceOnline + def set_PriceOnline(self, PriceOnline): + self.PriceOnline = PriceOnline + def get_PriceCPP(self): + return self.PriceCPP + def set_PriceCPP(self, PriceCPP): + self.PriceCPP = PriceCPP + def get_DeclaredValueRequired(self): + return self.DeclaredValueRequired + def set_DeclaredValueRequired(self, DeclaredValueRequired): + self.DeclaredValueRequired = DeclaredValueRequired + def get_DueSenderRequired(self): + return self.DueSenderRequired + def set_DueSenderRequired(self, DueSenderRequired): + self.DueSenderRequired = DueSenderRequired + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Available is not None or + self.AvailableOnline is not None or + self.AvailableCPP is not None or + self.Price is not None or + self.PriceOnline is not None or + self.PriceCPP is not None or + self.DeclaredValueRequired is not None or + self.DueSenderRequired is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecialServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SpecialServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = 
self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecialServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecialServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecialServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Available is not None: + namespaceprefix_ = self.Available_nsprefix_ + ':' if (UseCapturedNS_ and self.Available_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Available, input_name='Available'), namespaceprefix_ , eol_)) + if self.AvailableOnline is not None: + namespaceprefix_ = self.AvailableOnline_nsprefix_ + ':' if (UseCapturedNS_ and self.AvailableOnline_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailableOnline>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AvailableOnline, input_name='AvailableOnline'), namespaceprefix_ , eol_)) + if self.AvailableCPP is not None: + namespaceprefix_ = self.AvailableCPP_nsprefix_ + ':' if (UseCapturedNS_ and self.AvailableCPP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailableCPP>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AvailableCPP, input_name='AvailableCPP'), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + if self.PriceOnline is not None: + namespaceprefix_ = self.PriceOnline_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOnline_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOnline>%s%s' % (namespaceprefix_ , self.gds_format_float(self.PriceOnline, input_name='PriceOnline'), namespaceprefix_ , eol_)) + if self.PriceCPP is not None: + namespaceprefix_ = self.PriceCPP_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceCPP_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sPriceCPP>%s%s' % (namespaceprefix_ , self.gds_format_float(self.PriceCPP, input_name='PriceCPP'), namespaceprefix_ , eol_)) + if self.DeclaredValueRequired is not None: + namespaceprefix_ = self.DeclaredValueRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.DeclaredValueRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeclaredValueRequired>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DeclaredValueRequired, input_name='DeclaredValueRequired'), namespaceprefix_ , eol_)) + if self.DueSenderRequired is not None: + namespaceprefix_ = self.DueSenderRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.DueSenderRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDueSenderRequired>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DueSenderRequired, input_name='DueSenderRequired'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ServiceID') + ival_ = self.gds_validate_integer(ival_, node, 'ServiceID') + self.ServiceID = ival_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Available': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Available') + ival_ = self.gds_validate_boolean(ival_, node, 'Available') + self.Available = ival_ + self.Available_nsprefix_ = child_.prefix + elif nodeName_ == 'AvailableOnline': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AvailableOnline') + ival_ = self.gds_validate_boolean(ival_, node, 'AvailableOnline') + self.AvailableOnline = ival_ + self.AvailableOnline_nsprefix_ = child_.prefix + elif nodeName_ == 'AvailableCPP': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AvailableCPP') + ival_ = self.gds_validate_boolean(ival_, node, 'AvailableCPP') + self.AvailableCPP = ival_ + self.AvailableCPP_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOnline' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'PriceOnline') + fval_ = self.gds_validate_float(fval_, node, 'PriceOnline') + self.PriceOnline = fval_ + self.PriceOnline_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceCPP' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'PriceCPP') + fval_ = self.gds_validate_float(fval_, node, 'PriceCPP') + self.PriceCPP = fval_ + 
self.PriceCPP_nsprefix_ = child_.prefix
+        elif nodeName_ == 'DeclaredValueRequired':
+            sval_ = child_.text
+            ival_ = self.gds_parse_boolean(sval_, node, 'DeclaredValueRequired')
+            ival_ = self.gds_validate_boolean(ival_, node, 'DeclaredValueRequired')
+            self.DeclaredValueRequired = ival_
+            self.DeclaredValueRequired_nsprefix_ = child_.prefix
+        elif nodeName_ == 'DueSenderRequired':
+            sval_ = child_.text
+            ival_ = self.gds_parse_boolean(sval_, node, 'DueSenderRequired')
+            ival_ = self.gds_validate_boolean(ival_, node, 'DueSenderRequired')
+            self.DueSenderRequired = ival_
+            self.DueSenderRequired_nsprefix_ = child_.prefix
+# end class SpecialServiceType
+
+
+class RestrictionType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, Restrictions=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        self.Restrictions = Restrictions
+        self.Restrictions_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, RestrictionType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if RestrictionType.subclass:
+            return RestrictionType.subclass(*args_, **kwargs_)
+        else:
+            return RestrictionType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_Restrictions(self):
+        return self.Restrictions
+    def set_Restrictions(self, Restrictions):
+        self.Restrictions = Restrictions
+    def has__content(self):
+        if (
+            self.Restrictions is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RestrictionType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'RestrictionType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RestrictionType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RestrictionType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RestrictionType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Restrictions is not None:
+            namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRestrictions>%s</%sRestrictions>%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix +# end class RestrictionType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + 
reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from rate_v4_response import *\n\n') + sys.stdout.write('import rate_v4_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ErrorType", + "PackageType", + "PostageType", + "RateV4Response", + "RestrictionType", + "SpecialServiceType", + "SpecialServicesType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/scan_request.py b/modules/connectors/usps/karrio/schemas/usps/scan_request.py new file mode 100644 index 0000000000..0afb931e2b --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/scan_request.py @@ -0,0 +1,1855 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:45 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/scan_request.py') +# +# Command line arguments: +# ./schemas/SCANRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/scan_request.py" ./schemas/SCANRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SCANRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, FromName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, Shipment=None, MailDate=None, MailTime=None, EntryFacility=None, ImageType=None, CustomerRefNo=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.FromName = FromName + self.FromName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.Shipment = Shipment + self.Shipment_nsprefix_ = None + self.MailDate = MailDate + self.MailDate_nsprefix_ = None + self.MailTime = MailTime + self.MailTime_nsprefix_ = None + self.EntryFacility = EntryFacility + self.EntryFacility_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANRequest.subclass: + return SCANRequest.subclass(*args_, **kwargs_) + else: + return SCANRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_FromName(self): + return self.FromName + def set_FromName(self, FromName): + self.FromName = FromName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): 
+        self.FromAddress1 = FromAddress1
+    def get_FromAddress2(self):
+        return self.FromAddress2
+    def set_FromAddress2(self, FromAddress2):
+        self.FromAddress2 = FromAddress2
+    def get_FromCity(self):
+        return self.FromCity
+    def set_FromCity(self, FromCity):
+        self.FromCity = FromCity
+    def get_FromState(self):
+        return self.FromState
+    def set_FromState(self, FromState):
+        self.FromState = FromState
+    def get_FromZip5(self):
+        return self.FromZip5
+    def set_FromZip5(self, FromZip5):
+        self.FromZip5 = FromZip5
+    def get_FromZip4(self):
+        return self.FromZip4
+    def set_FromZip4(self, FromZip4):
+        self.FromZip4 = FromZip4
+    def get_Shipment(self):
+        return self.Shipment
+    def set_Shipment(self, Shipment):
+        self.Shipment = Shipment
+    def get_MailDate(self):
+        return self.MailDate
+    def set_MailDate(self, MailDate):
+        self.MailDate = MailDate
+    def get_MailTime(self):
+        return self.MailTime
+    def set_MailTime(self, MailTime):
+        self.MailTime = MailTime
+    def get_EntryFacility(self):
+        return self.EntryFacility
+    def set_EntryFacility(self, EntryFacility):
+        self.EntryFacility = EntryFacility
+    def get_ImageType(self):
+        return self.ImageType
+    def set_ImageType(self, ImageType):
+        self.ImageType = ImageType
+    def get_CustomerRefNo(self):
+        return self.CustomerRefNo
+    def set_CustomerRefNo(self, CustomerRefNo):
+        self.CustomerRefNo = CustomerRefNo
+    def get_USERID(self):
+        return self.USERID
+    def set_USERID(self, USERID):
+        self.USERID = USERID
+    def get_PASSWORD(self):
+        return self.PASSWORD
+    def set_PASSWORD(self, PASSWORD):
+        self.PASSWORD = PASSWORD
+    def has__content(self):
+        if (
+            self.Option is not None or
+            self.FromName is not None or
+            self.FromFirm is not None or
+            self.FromAddress1 is not None or
+            self.FromAddress2 is not None or
+            self.FromCity is not None or
+            self.FromState is not None or
+            self.FromZip5 is not None or
+            self.FromZip4 is not None or
+            self.Shipment is not None or
+            self.MailDate is not None or
+            self.MailTime is not None or
+            self.EntryFacility is not None or
+            self.ImageType is not None or
+            self.CustomerRefNo is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANRequest', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANRequest')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'SCANRequest':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANRequest')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SCANRequest', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Option is not None:
+            namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else ''
+            self.Option.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Option', pretty_print=pretty_print)
+        if self.FromName is not None:
+            namespaceprefix_ = self.FromName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromName>%s</%sFromName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromName), input_name='FromName')), namespaceprefix_ , eol_))
+        if self.FromFirm is not None:
+            namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromFirm>%s</%sFromFirm>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_))
+        if self.FromAddress1 is not None:
+            namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromAddress1>%s</%sFromAddress1>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_))
+        if self.FromAddress2 is not None:
+            namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromAddress2>%s</%sFromAddress2>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_))
+        if self.FromCity is not None:
+            namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromCity>%s</%sFromCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_))
+        if self.FromState is not None:
+            namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromState>%s</%sFromState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_))
+        if self.FromZip5 is not None:
+            namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromZip5>%s</%sFromZip5>%s' % (namespaceprefix_ , self.gds_format_integer(self.FromZip5, input_name='FromZip5'), namespaceprefix_ , eol_))
+        if self.FromZip4 is not None:
+            namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFromZip4>%s</%sFromZip4>%s' % (namespaceprefix_ , self.gds_format_integer(self.FromZip4, input_name='FromZip4'), namespaceprefix_ , eol_))
+        if self.Shipment is not None:
namespaceprefix_ = self.Shipment_nsprefix_ + ':' if (UseCapturedNS_ and self.Shipment_nsprefix_) else '' + self.Shipment.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Shipment', pretty_print=pretty_print) + if self.MailDate is not None: + namespaceprefix_ = self.MailDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MailDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailDate>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailDate, input_name='MailDate'), namespaceprefix_ , eol_)) + if self.MailTime is not None: + namespaceprefix_ = self.MailTime_nsprefix_ + ':' if (UseCapturedNS_ and self.MailTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailTime, input_name='MailTime'), namespaceprefix_ , eol_)) + if self.EntryFacility is not None: + namespaceprefix_ = self.EntryFacility_nsprefix_ + ':' if (UseCapturedNS_ and self.EntryFacility_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEntryFacility>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.EntryFacility, input_name='EntryFacility'), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + obj_ = OptionType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Option = obj_ + obj_.original_tagname_ = 'Option' + elif nodeName_ == 'FromName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromName') + value_ = self.gds_validate_string(value_, node, 'FromName') + self.FromName = value_ + self.FromName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + 
self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FromZip5') + ival_ = self.gds_validate_integer(ival_, node, 'FromZip5') + self.FromZip5 = ival_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FromZip4') + ival_ = self.gds_validate_integer(ival_, node, 'FromZip4') + self.FromZip4 = ival_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'Shipment': + obj_ = ShipmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Shipment = obj_ + obj_.original_tagname_ = 'Shipment' + elif nodeName_ == 'MailDate' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailDate') + ival_ = self.gds_validate_integer(ival_, node, 'MailDate') + self.MailDate = ival_ + self.MailDate_nsprefix_ = child_.prefix + elif nodeName_ == 'MailTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailTime') + ival_ = self.gds_validate_integer(ival_, node, 'MailTime') + self.MailTime = ival_ + self.MailTime_nsprefix_ = child_.prefix + elif nodeName_ == 'EntryFacility' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'EntryFacility') + ival_ = self.gds_validate_integer(ival_, node, 'EntryFacility') + self.EntryFacility = ival_ + self.EntryFacility_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix +# end class SCANRequest + + +class OptionType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Form=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Form = Form + self.Form_nsprefix_ = None + def factory(*args_, **kwargs_): + if 
CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, OptionType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if OptionType.subclass: + return OptionType.subclass(*args_, **kwargs_) + else: + return OptionType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Form(self): + return self.Form + def set_Form(self, Form): + self.Form = Form + def has__content(self): + if ( + self.Form is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OptionType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('OptionType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'OptionType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OptionType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OptionType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OptionType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OptionType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Form is not None: + namespaceprefix_ = self.Form_nsprefix_ + ':' if (UseCapturedNS_ and self.Form_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sForm>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Form, input_name='Form'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Form' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Form') + ival_ = self.gds_validate_integer(ival_, node, 'Form') + self.Form = ival_ + self.Form_nsprefix_ = child_.prefix +# end class OptionType + + +class ShipmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PackageDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + 
self.ns_prefix_ = None + self.PackageDetail = PackageDetail + self.PackageDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShipmentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShipmentType.subclass: + return ShipmentType.subclass(*args_, **kwargs_) + else: + return ShipmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PackageDetail(self): + return self.PackageDetail + def set_PackageDetail(self, PackageDetail): + self.PackageDetail = PackageDetail + def has__content(self): + if ( + self.PackageDetail is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShipmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShipmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShipmentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShipmentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShipmentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipmentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PackageDetail is not None: + namespaceprefix_ = self.PackageDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageDetail_nsprefix_) else '' + self.PackageDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PackageDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PackageDetail': + obj_ = PackageDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.PackageDetail = obj_ + obj_.original_tagname_ = 'PackageDetail' +# end class ShipmentType + + +class PackageDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PkgBarcode=None, 
gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.PkgBarcode = PkgBarcode + self.PkgBarcode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageDetailType.subclass: + return PackageDetailType.subclass(*args_, **kwargs_) + else: + return PackageDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PkgBarcode(self): + return self.PkgBarcode + def set_PkgBarcode(self, PkgBarcode): + self.PkgBarcode = PkgBarcode + def has__content(self): + if ( + self.PkgBarcode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PkgBarcode is not None: + namespaceprefix_ = self.PkgBarcode_nsprefix_ + ':' if (UseCapturedNS_ and self.PkgBarcode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPkgBarcode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PkgBarcode), input_name='PkgBarcode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PkgBarcode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'PkgBarcode') + value_ = self.gds_validate_string(value_, node, 'PkgBarcode') + self.PkgBarcode = value_ + self.PkgBarcode_nsprefix_ = child_.prefix +# end class PackageDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANRequest' + rootClass = SCANRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANRequest' + rootClass = SCANRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANRequest' + rootClass = SCANRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANRequest' + rootClass = SCANRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from scan_request import *\n\n') + sys.stdout.write('import scan_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "OptionType", + "PackageDetailType", + "SCANRequest", + "ShipmentType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/scan_response.py b/modules/connectors/usps/karrio/schemas/usps/scan_response.py new file mode 100644 index 0000000000..00567f7d33 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/scan_response.py @@ -0,0 +1,1536 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:45 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/scan_response.py') +# +# Command line arguments: +# ./schemas/SCANResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/scan_response.py" ./schemas/SCANResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SCANResponse(GeneratedsSuper): + """SCANFormImage -- 65255 skipped + + """ + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SCANFormNumber=None, SCANFormImage=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SCANFormNumber = SCANFormNumber + self.SCANFormNumber_nsprefix_ = None + self.SCANFormImage = SCANFormImage + self.SCANFormImage_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANResponse.subclass: + return SCANResponse.subclass(*args_, **kwargs_) + else: + return SCANResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SCANFormNumber(self): + return self.SCANFormNumber + def set_SCANFormNumber(self, SCANFormNumber): + self.SCANFormNumber = SCANFormNumber + def get_SCANFormImage(self): + return self.SCANFormImage + def set_SCANFormImage(self, SCANFormImage): + self.SCANFormImage = SCANFormImage + def has__content(self): + if ( + self.SCANFormNumber is not None or + self.SCANFormImage is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SCANResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SCANResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def 
_exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SCANFormNumber is not None: + namespaceprefix_ = self.SCANFormNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.SCANFormNumber_nsprefix_) else '' + self.SCANFormNumber.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SCANFormNumber', pretty_print=pretty_print) + if self.SCANFormImage is not None: + namespaceprefix_ = self.SCANFormImage_nsprefix_ + ':' if (UseCapturedNS_ and self.SCANFormImage_nsprefix_) else '' + self.SCANFormImage.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SCANFormImage', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SCANFormNumber': + obj_ = SCANFormNumberType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SCANFormNumber = obj_ + obj_.original_tagname_ = 'SCANFormNumber' + elif nodeName_ == 'SCANFormImage': + obj_ = SCANFormImageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SCANFormImage = obj_ + obj_.original_tagname_ = 'SCANFormImage' +# end class SCANResponse + + +class SCANFormNumberType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ShipDate=None, EntryZipCode=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ShipDate = _cast(None, ShipDate) + self.ShipDate_nsprefix_ = None + self.EntryZipCode = _cast(int, EntryZipCode) + self.EntryZipCode_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANFormNumberType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANFormNumberType.subclass: + return SCANFormNumberType.subclass(*args_, **kwargs_) + else: + return SCANFormNumberType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_EntryZipCode(self): + return self.EntryZipCode + def set_EntryZipCode(self, EntryZipCode): + self.EntryZipCode = EntryZipCode + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return 
False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormNumberType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANFormNumberType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SCANFormNumberType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANFormNumberType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANFormNumberType'): + if self.ShipDate is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + outfile.write(' ShipDate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ShipDate), input_name='ShipDate')), )) + if self.EntryZipCode is not None and 'EntryZipCode' not in already_processed: + already_processed.add('EntryZipCode') + outfile.write(' EntryZipCode="%s"' % self.gds_format_integer(self.EntryZipCode, input_name='EntryZipCode')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormNumberType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ShipDate', node) + if value is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + self.ShipDate = value + value = find_attr_value_('EntryZipCode', node) + if value is not None and 'EntryZipCode' not in already_processed: + already_processed.add('EntryZipCode') + self.EntryZipCode = self.gds_parse_integer(value, node, 'EntryZipCode') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class SCANFormNumberType + + +class SCANFormImageType(GeneratedsSuper): + """SCANFormImageType -- 65255 skipped + + """ + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ShipDate=None, EntryZipCode=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ShipDate = _cast(None, ShipDate) + self.ShipDate_nsprefix_ = None + self.EntryZipCode = _cast(int, EntryZipCode) + self.EntryZipCode_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ 
is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANFormImageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANFormImageType.subclass: + return SCANFormImageType.subclass(*args_, **kwargs_) + else: + return SCANFormImageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_EntryZipCode(self): + return self.EntryZipCode + def set_EntryZipCode(self, EntryZipCode): + self.EntryZipCode = EntryZipCode + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormImageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANFormImageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SCANFormImageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANFormImageType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANFormImageType'): + if self.ShipDate is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + outfile.write(' ShipDate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ShipDate), input_name='ShipDate')), )) + if self.EntryZipCode is not None and 'EntryZipCode' not in already_processed: + already_processed.add('EntryZipCode') + outfile.write(' EntryZipCode="%s"' % self.gds_format_integer(self.EntryZipCode, input_name='EntryZipCode')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormImageType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ShipDate', node) + if value is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + self.ShipDate = value + value = find_attr_value_('EntryZipCode', node) + if value is not None and 'EntryZipCode' not in already_processed: + 
already_processed.add('EntryZipCode') + self.EntryZipCode = self.gds_parse_integer(value, node, 'EntryZipCode') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class SCANFormImageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANResponse' + rootClass = SCANResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANResponse' + rootClass = SCANResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANResponse' + rootClass = SCANResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANResponse' + rootClass = SCANResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from scan_response import *\n\n') + sys.stdout.write('import scan_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
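For reference, a minimal usage sketch of the parseString() entry point generated above. This is not part of the generated module: the import path simply follows the file location added by this patch, and the sample XML values are illustrative only, not taken from a real USPS SCAN response.

import karrio.schemas.usps.scan_response as scan_response

# Illustrative SCAN response fragment (no XML declaration, per the
# parseString docstring above).
sample = (
    '<SCANResponse>'
    '<SCANFormNumber ShipDate="2024-04-03" EntryZipCode="20770">'
    '9275090100000000000000'
    '</SCANFormNumber>'
    '</SCANResponse>'
)

# silence=True returns the object tree without re-exporting it to stdout.
response = scan_response.parseString(sample, silence=True, print_warnings=False)
form = response.get_SCANFormNumber()
print(form.get_valueOf_(), form.get_ShipDate(), form.get_EntryZipCode())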
+NamespaceToDefMappings_ = {} + +__all__ = [ + "SCANFormImageType", + "SCANFormNumberType", + "SCANResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_request.py b/modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_request.py new file mode 100644 index 0000000000..2917aa2c5a --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_request.py @@ -0,0 +1,1437 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:45 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/sdc_get_locations_request.py') +# +# Command line arguments: +# ./schemas/SDCGetLocationsRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/sdc_get_locations_request.py" ./schemas/SDCGetLocationsRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
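For reference, a minimal sketch of the subclass-override hook wired through CurrentSubclassModule_ and getSubclassFromModule_ above: pointing CurrentSubclassModule_ at a module that defines a class named "<GeneratedClass>Sub" makes the generated factory() constructors return that subclass instead of the generated class. This is not part of the generated file; the import path and the no-op subclass are illustrative assumptions.

import sys
import karrio.schemas.usps.sdc_get_locations_request as sdc

class SDCGetLocationsRequestSub(sdc.SDCGetLocationsRequest):
    # Hypothetical override point; a real subclass would customize
    # building or exporting behaviour here.
    pass

# factory() consults CurrentSubclassModule_ at call time, so assigning the
# current module is enough for the "<name>Sub" lookup to succeed.
sdc.CurrentSubclassModule_ = sys.modules[__name__]
request = sdc.SDCGetLocationsRequest.factory(OriginZIP=20770, DestinationZIP=94102)
assert isinstance(request, SDCGetLocationsRequestSub)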
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SDCGetLocationsRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, MailClass=None, OriginZIP=None, DestinationZIP=None, AcceptDate=None, AcceptTime=None, NonExpeditedDetail=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.OriginZIP = OriginZIP + self.OriginZIP_nsprefix_ = None + self.DestinationZIP = DestinationZIP + self.DestinationZIP_nsprefix_ = None + self.AcceptDate = AcceptDate + self.AcceptDate_nsprefix_ = None + self.AcceptTime = AcceptTime + self.AcceptTime_nsprefix_ = None + self.NonExpeditedDetail = NonExpeditedDetail + self.NonExpeditedDetail_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SDCGetLocationsRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SDCGetLocationsRequest.subclass: + return SDCGetLocationsRequest.subclass(*args_, **kwargs_) + else: + return SDCGetLocationsRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_OriginZIP(self): + return self.OriginZIP + def set_OriginZIP(self, OriginZIP): + self.OriginZIP = OriginZIP + def get_DestinationZIP(self): + return self.DestinationZIP + def set_DestinationZIP(self, DestinationZIP): + self.DestinationZIP = DestinationZIP + def get_AcceptDate(self): + return self.AcceptDate + def set_AcceptDate(self, AcceptDate): + self.AcceptDate = AcceptDate + def get_AcceptTime(self): + return self.AcceptTime + def set_AcceptTime(self, AcceptTime): + self.AcceptTime = AcceptTime + def get_NonExpeditedDetail(self): + return self.NonExpeditedDetail + def set_NonExpeditedDetail(self, NonExpeditedDetail): + self.NonExpeditedDetail = NonExpeditedDetail + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def 
get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.MailClass is not None or + self.OriginZIP is not None or + self.DestinationZIP is not None or + self.AcceptDate is not None or + self.AcceptTime is not None or + self.NonExpeditedDetail is not None or + self.ClientType is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SDCGetLocationsRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SDCGetLocationsRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SDCGetLocationsRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SDCGetLocationsRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SDCGetLocationsRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.OriginZIP is not None: + namespaceprefix_ = self.OriginZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZIP, input_name='OriginZIP'), namespaceprefix_ , eol_)) + if self.DestinationZIP is not None: + namespaceprefix_ = self.DestinationZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZIP, input_name='DestinationZIP'), namespaceprefix_ , eol_)) + if self.AcceptDate is not None: + namespaceprefix_ = 
self.AcceptDate_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AcceptDate), input_name='AcceptDate')), namespaceprefix_ , eol_)) + if self.AcceptTime is not None: + namespaceprefix_ = self.AcceptTime_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.AcceptTime, input_name='AcceptTime'), namespaceprefix_ , eol_)) + if self.NonExpeditedDetail is not None: + namespaceprefix_ = self.NonExpeditedDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedDetail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonExpeditedDetail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonExpeditedDetail), input_name='NonExpeditedDetail')), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZIP') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZIP') + self.OriginZIP = ival_ + self.OriginZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZIP') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZIP') + self.DestinationZIP = ival_ + self.DestinationZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AcceptDate') + value_ = self.gds_validate_string(value_, node, 'AcceptDate') + self.AcceptDate = value_ + self.AcceptDate_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'AcceptTime') + ival_ = 
self.gds_validate_integer(ival_, node, 'AcceptTime') + self.AcceptTime = ival_ + self.AcceptTime_nsprefix_ = child_.prefix + elif nodeName_ == 'NonExpeditedDetail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonExpeditedDetail') + value_ = self.gds_validate_string(value_, node, 'NonExpeditedDetail') + self.NonExpeditedDetail = value_ + self.NonExpeditedDetail_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class SDCGetLocationsRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from sdc_get_locations_request import *\n\n') + sys.stdout.write('import sdc_get_locations_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "SDCGetLocationsRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_response.py b/modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_response.py new file mode 100644 index 0000000000..cd7c5f42be --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/sdc_get_locations_response.py @@ -0,0 +1,3217 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:45 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/sdc_get_locations_response.py') +# +# Command line arguments: +# ./schemas/SDCGetLocationsResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/sdc_get_locations_response.py" ./schemas/SDCGetLocationsResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SDCGetLocationsResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Release=None, MailClass=None, OriginZIP=None, OriginCity=None, OriginState=None, DestZIP=None, DestCity=None, DestState=None, AcceptDate=None, AcceptTime=None, Expedited=None, NonExpedited=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Release = Release + self.Release_nsprefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.OriginZIP = OriginZIP + self.OriginZIP_nsprefix_ = None + self.OriginCity = OriginCity + self.OriginCity_nsprefix_ = None + self.OriginState = OriginState + self.OriginState_nsprefix_ = None + self.DestZIP = DestZIP + self.DestZIP_nsprefix_ = None + self.DestCity = DestCity + self.DestCity_nsprefix_ = None + self.DestState = DestState + self.DestState_nsprefix_ = None + if isinstance(AcceptDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(AcceptDate, '%Y-%m-%d').date() + else: + initvalue_ = AcceptDate + self.AcceptDate = initvalue_ + self.AcceptDate_nsprefix_ = None + self.AcceptTime = AcceptTime + self.AcceptTime_nsprefix_ = None + self.Expedited = Expedited + self.Expedited_nsprefix_ = None + if NonExpedited is None: + self.NonExpedited = [] + else: + self.NonExpedited = NonExpedited + self.NonExpedited_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SDCGetLocationsResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SDCGetLocationsResponse.subclass: + return SDCGetLocationsResponse.subclass(*args_, **kwargs_) + else: + return SDCGetLocationsResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Release(self): + return self.Release + def set_Release(self, Release): + self.Release = Release + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_OriginZIP(self): + return self.OriginZIP + def set_OriginZIP(self, OriginZIP): + self.OriginZIP = OriginZIP + def get_OriginCity(self): + return self.OriginCity + def set_OriginCity(self, OriginCity): + self.OriginCity = OriginCity + def get_OriginState(self): + return self.OriginState + def set_OriginState(self, 
OriginState): + self.OriginState = OriginState + def get_DestZIP(self): + return self.DestZIP + def set_DestZIP(self, DestZIP): + self.DestZIP = DestZIP + def get_DestCity(self): + return self.DestCity + def set_DestCity(self, DestCity): + self.DestCity = DestCity + def get_DestState(self): + return self.DestState + def set_DestState(self, DestState): + self.DestState = DestState + def get_AcceptDate(self): + return self.AcceptDate + def set_AcceptDate(self, AcceptDate): + self.AcceptDate = AcceptDate + def get_AcceptTime(self): + return self.AcceptTime + def set_AcceptTime(self, AcceptTime): + self.AcceptTime = AcceptTime + def get_Expedited(self): + return self.Expedited + def set_Expedited(self, Expedited): + self.Expedited = Expedited + def get_NonExpedited(self): + return self.NonExpedited + def set_NonExpedited(self, NonExpedited): + self.NonExpedited = NonExpedited + def add_NonExpedited(self, value): + self.NonExpedited.append(value) + def insert_NonExpedited_at(self, index, value): + self.NonExpedited.insert(index, value) + def replace_NonExpedited_at(self, index, value): + self.NonExpedited[index] = value + def has__content(self): + if ( + self.Release is not None or + self.MailClass is not None or + self.OriginZIP is not None or + self.OriginCity is not None or + self.OriginState is not None or + self.DestZIP is not None or + self.DestCity is not None or + self.DestState is not None or + self.AcceptDate is not None or + self.AcceptTime is not None or + self.Expedited is not None or + self.NonExpedited + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SDCGetLocationsResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SDCGetLocationsResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SDCGetLocationsResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SDCGetLocationsResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SDCGetLocationsResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Release is not None: + namespaceprefix_ = self.Release_nsprefix_ + ':' if (UseCapturedNS_ and self.Release_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRelease>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Release, input_name='Release'), namespaceprefix_ , eol_)) + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.OriginZIP is not None: + namespaceprefix_ = self.OriginZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZIP, input_name='OriginZIP'), namespaceprefix_ , eol_)) + if self.OriginCity is not None: + namespaceprefix_ = self.OriginCity_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginCity), input_name='OriginCity')), namespaceprefix_ , eol_)) + if self.OriginState is not None: + namespaceprefix_ = self.OriginState_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginState), input_name='OriginState')), namespaceprefix_ , eol_)) + if self.DestZIP is not None: + namespaceprefix_ = self.DestZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.DestZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestZIP, input_name='DestZIP'), namespaceprefix_ , eol_)) + if self.DestCity is not None: + namespaceprefix_ = self.DestCity_nsprefix_ + ':' if (UseCapturedNS_ and self.DestCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestCity), input_name='DestCity')), namespaceprefix_ , eol_)) + if self.DestState is not None: + namespaceprefix_ = self.DestState_nsprefix_ + ':' if (UseCapturedNS_ and self.DestState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestState), input_name='DestState')), namespaceprefix_ , eol_)) + if self.AcceptDate is not None: + namespaceprefix_ = self.AcceptDate_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.AcceptDate, input_name='AcceptDate'), namespaceprefix_ , eol_)) + if self.AcceptTime is not None: + namespaceprefix_ = self.AcceptTime_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.AcceptTime, input_name='AcceptTime'), namespaceprefix_ , eol_)) + if self.Expedited is not None: + namespaceprefix_ = self.Expedited_nsprefix_ + ':' if (UseCapturedNS_ and self.Expedited_nsprefix_) else '' + self.Expedited.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Expedited', pretty_print=pretty_print) + for NonExpedited_ in self.NonExpedited: + namespaceprefix_ = self.NonExpedited_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpedited_nsprefix_) else '' + NonExpedited_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonExpedited', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + 
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Release' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Release') + ival_ = self.gds_validate_integer(ival_, node, 'Release') + self.Release = ival_ + self.Release_nsprefix_ = child_.prefix + elif nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZIP') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZIP') + self.OriginZIP = ival_ + self.OriginZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCity') + value_ = self.gds_validate_string(value_, node, 'OriginCity') + self.OriginCity = value_ + self.OriginCity_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginState') + value_ = self.gds_validate_string(value_, node, 'OriginState') + self.OriginState = value_ + self.OriginState_nsprefix_ = child_.prefix + elif nodeName_ == 'DestZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestZIP') + ival_ = self.gds_validate_integer(ival_, node, 'DestZIP') + self.DestZIP = ival_ + self.DestZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'DestCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestCity') + value_ = self.gds_validate_string(value_, node, 'DestCity') + self.DestCity = value_ + self.DestCity_nsprefix_ = child_.prefix + elif nodeName_ == 'DestState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestState') + value_ = self.gds_validate_string(value_, node, 'DestState') + self.DestState = value_ + self.DestState_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.AcceptDate = dval_ + self.AcceptDate_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'AcceptTime') + ival_ = self.gds_validate_integer(ival_, node, 'AcceptTime') + self.AcceptTime = ival_ + self.AcceptTime_nsprefix_ = child_.prefix + elif nodeName_ == 'Expedited': + obj_ = ExpeditedType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Expedited = obj_ + obj_.original_tagname_ = 'Expedited' + elif nodeName_ == 'NonExpedited': + obj_ = NonExpeditedType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.NonExpedited.append(obj_) + obj_.original_tagname_ = 'NonExpedited' +# end class SDCGetLocationsResponse + + +class ExpeditedType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = 
None + superclass = None + def __init__(self, EAD=None, Commitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(EAD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EAD, '%Y-%m-%d').date() + else: + initvalue_ = EAD + self.EAD = initvalue_ + self.EAD_nsprefix_ = None + if Commitment is None: + self.Commitment = [] + else: + self.Commitment = Commitment + self.Commitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExpeditedType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExpeditedType.subclass: + return ExpeditedType.subclass(*args_, **kwargs_) + else: + return ExpeditedType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EAD(self): + return self.EAD + def set_EAD(self, EAD): + self.EAD = EAD + def get_Commitment(self): + return self.Commitment + def set_Commitment(self, Commitment): + self.Commitment = Commitment + def add_Commitment(self, value): + self.Commitment.append(value) + def insert_Commitment_at(self, index, value): + self.Commitment.insert(index, value) + def replace_Commitment_at(self, index, value): + self.Commitment[index] = value + def has__content(self): + if ( + self.EAD is not None or + self.Commitment + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpeditedType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpeditedType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpeditedType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpeditedType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpeditedType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpeditedType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpeditedType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.EAD is not None: + namespaceprefix_ = self.EAD_nsprefix_ + ':' if (UseCapturedNS_ and self.EAD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEAD>%s%s' % (namespaceprefix_ , self.gds_format_date(self.EAD, input_name='EAD'), namespaceprefix_ , eol_)) + for Commitment_ in self.Commitment: + namespaceprefix_ = self.Commitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Commitment_nsprefix_) else '' + Commitment_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='Commitment', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'EAD': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.EAD = dval_ + self.EAD_nsprefix_ = child_.prefix + elif nodeName_ == 'Commitment': + obj_ = CommitmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Commitment.append(obj_) + obj_.original_tagname_ = 'Commitment' +# end class ExpeditedType + + +class CommitmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, MailClass=None, CommitmentName=None, CommitmentTime=None, CommitmentSeq=None, Location=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.CommitmentTime = CommitmentTime + self.CommitmentTime_nsprefix_ = None + self.CommitmentSeq = CommitmentSeq + self.CommitmentSeq_nsprefix_ = None + if Location is None: + self.Location = [] + else: + self.Location = Location + self.Location_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommitmentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CommitmentType.subclass: + return CommitmentType.subclass(*args_, **kwargs_) + else: + return CommitmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName = CommitmentName + def get_CommitmentTime(self): + return self.CommitmentTime + def set_CommitmentTime(self, CommitmentTime): + self.CommitmentTime = CommitmentTime + def get_CommitmentSeq(self): + return self.CommitmentSeq + def set_CommitmentSeq(self, CommitmentSeq): + self.CommitmentSeq = CommitmentSeq + def get_Location(self): + return self.Location + def set_Location(self, Location): + self.Location = Location + def add_Location(self, value): + self.Location.append(value) + def insert_Location_at(self, index, value): + self.Location.insert(index, value) + def replace_Location_at(self, index, value): + self.Location[index] = value + def has__content(self): + if ( + self.MailClass is not None or + self.CommitmentName is not None or + self.CommitmentTime is not None or + self.CommitmentSeq is not None or + self.Location + ): + return True + else: + return False + 
def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommitmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CommitmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommitmentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommitmentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommitmentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.CommitmentName is not None: + namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_)) + if self.CommitmentTime is not None: + namespaceprefix_ = self.CommitmentTime_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CommitmentTime, input_name='CommitmentTime'), namespaceprefix_ , eol_)) + if self.CommitmentSeq is not None: + namespaceprefix_ = self.CommitmentSeq_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentSeq_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentSeq>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentSeq), input_name='CommitmentSeq')), namespaceprefix_ , eol_)) + for Location_ in self.Location: + namespaceprefix_ = self.Location_nsprefix_ + ':' if (UseCapturedNS_ and self.Location_nsprefix_) else '' + Location_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Location', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, 
gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentName') + value_ = self.gds_validate_string(value_, node, 'CommitmentName') + self.CommitmentName = value_ + self.CommitmentName_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'CommitmentTime') + ival_ = self.gds_validate_integer(ival_, node, 'CommitmentTime') + self.CommitmentTime = ival_ + self.CommitmentTime_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentSeq': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentSeq') + value_ = self.gds_validate_string(value_, node, 'CommitmentSeq') + self.CommitmentSeq = value_ + self.CommitmentSeq_nsprefix_ = child_.prefix + elif nodeName_ == 'Location': + obj_ = LocationType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Location.append(obj_) + obj_.original_tagname_ = 'Location' +# end class CommitmentType + + +class LocationType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SDD=None, COT=None, FacType=None, Street=None, City=None, State=None, ZIP=None, IsGuaranteed=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(SDD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(SDD, '%Y-%m-%d').date() + else: + initvalue_ = SDD + self.SDD = initvalue_ + self.SDD_nsprefix_ = None + self.COT = COT + self.COT_nsprefix_ = None + self.FacType = FacType + self.FacType_nsprefix_ = None + self.Street = Street + self.Street_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP = ZIP + self.ZIP_nsprefix_ = None + self.IsGuaranteed = IsGuaranteed + self.IsGuaranteed_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, LocationType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LocationType.subclass: + return LocationType.subclass(*args_, **kwargs_) + else: + return LocationType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SDD(self): + return self.SDD + def set_SDD(self, SDD): + self.SDD = SDD + def get_COT(self): + return self.COT + def set_COT(self, COT): + self.COT = COT + def get_FacType(self): + return self.FacType + def set_FacType(self, FacType): + self.FacType = FacType + def get_Street(self): + return self.Street + def set_Street(self, Street): + self.Street = Street + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def 
set_State(self, State): + self.State = State + def get_ZIP(self): + return self.ZIP + def set_ZIP(self, ZIP): + self.ZIP = ZIP + def get_IsGuaranteed(self): + return self.IsGuaranteed + def set_IsGuaranteed(self, IsGuaranteed): + self.IsGuaranteed = IsGuaranteed + def has__content(self): + if ( + self.SDD is not None or + self.COT is not None or + self.FacType is not None or + self.Street is not None or + self.City is not None or + self.State is not None or + self.ZIP is not None or + self.IsGuaranteed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'LocationType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SDD is not None: + namespaceprefix_ = self.SDD_nsprefix_ + ':' if (UseCapturedNS_ and self.SDD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSDD>%s%s' % (namespaceprefix_ , self.gds_format_date(self.SDD, input_name='SDD'), namespaceprefix_ , eol_)) + if self.COT is not None: + namespaceprefix_ = self.COT_nsprefix_ + ':' if (UseCapturedNS_ and self.COT_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCOT>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.COT, input_name='COT'), namespaceprefix_ , eol_)) + if self.FacType is not None: + namespaceprefix_ = self.FacType_nsprefix_ + ':' if (UseCapturedNS_ and self.FacType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacType), input_name='FacType')), namespaceprefix_ , eol_)) + if self.Street is not None: + namespaceprefix_ = self.Street_nsprefix_ + ':' if (UseCapturedNS_ and self.Street_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStreet>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Street), input_name='Street')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), 
input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP is not None: + namespaceprefix_ = self.ZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ZIP, input_name='ZIP'), namespaceprefix_ , eol_)) + if self.IsGuaranteed is not None: + namespaceprefix_ = self.IsGuaranteed_nsprefix_ + ':' if (UseCapturedNS_ and self.IsGuaranteed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sIsGuaranteed>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.IsGuaranteed, input_name='IsGuaranteed'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SDD': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.SDD = dval_ + self.SDD_nsprefix_ = child_.prefix + elif nodeName_ == 'COT' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'COT') + ival_ = self.gds_validate_integer(ival_, node, 'COT') + self.COT = ival_ + self.COT_nsprefix_ = child_.prefix + elif nodeName_ == 'FacType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacType') + value_ = self.gds_validate_string(value_, node, 'FacType') + self.FacType = value_ + self.FacType_nsprefix_ = child_.prefix + elif nodeName_ == 'Street': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Street') + value_ = self.gds_validate_string(value_, node, 'Street') + self.Street = value_ + self.Street_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ZIP') + ival_ = self.gds_validate_integer(ival_, node, 'ZIP') + self.ZIP = ival_ + self.ZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'IsGuaranteed' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'IsGuaranteed') + ival_ = self.gds_validate_integer(ival_, node, 'IsGuaranteed') + self.IsGuaranteed = ival_ + self.IsGuaranteed_nsprefix_ = child_.prefix +# end class LocationType + + +class NonExpeditedType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass 
= None + superclass = None + def __init__(self, MailClass=None, NonExpeditedDestType=None, EAD=None, COT=None, SvcStdMsg=None, SvcStdDays=None, TotDaysDeliver=None, SchedDlvryDate=None, NonDlvryDays=None, NonExpeditedExceptions=None, HFPU=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.NonExpeditedDestType = NonExpeditedDestType + self.NonExpeditedDestType_nsprefix_ = None + if isinstance(EAD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EAD, '%Y-%m-%d').date() + else: + initvalue_ = EAD + self.EAD = initvalue_ + self.EAD_nsprefix_ = None + self.COT = COT + self.COT_nsprefix_ = None + self.SvcStdMsg = SvcStdMsg + self.SvcStdMsg_nsprefix_ = None + self.SvcStdDays = SvcStdDays + self.SvcStdDays_nsprefix_ = None + self.TotDaysDeliver = TotDaysDeliver + self.TotDaysDeliver_nsprefix_ = None + if isinstance(SchedDlvryDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(SchedDlvryDate, '%Y-%m-%d').date() + else: + initvalue_ = SchedDlvryDate + self.SchedDlvryDate = initvalue_ + self.SchedDlvryDate_nsprefix_ = None + self.NonDlvryDays = NonDlvryDays + self.NonDlvryDays_nsprefix_ = None + self.NonExpeditedExceptions = NonExpeditedExceptions + self.NonExpeditedExceptions_nsprefix_ = None + self.HFPU = HFPU + self.HFPU_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, NonExpeditedType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if NonExpeditedType.subclass: + return NonExpeditedType.subclass(*args_, **kwargs_) + else: + return NonExpeditedType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_NonExpeditedDestType(self): + return self.NonExpeditedDestType + def set_NonExpeditedDestType(self, NonExpeditedDestType): + self.NonExpeditedDestType = NonExpeditedDestType + def get_EAD(self): + return self.EAD + def set_EAD(self, EAD): + self.EAD = EAD + def get_COT(self): + return self.COT + def set_COT(self, COT): + self.COT = COT + def get_SvcStdMsg(self): + return self.SvcStdMsg + def set_SvcStdMsg(self, SvcStdMsg): + self.SvcStdMsg = SvcStdMsg + def get_SvcStdDays(self): + return self.SvcStdDays + def set_SvcStdDays(self, SvcStdDays): + self.SvcStdDays = SvcStdDays + def get_TotDaysDeliver(self): + return self.TotDaysDeliver + def set_TotDaysDeliver(self, TotDaysDeliver): + self.TotDaysDeliver = TotDaysDeliver + def get_SchedDlvryDate(self): + return self.SchedDlvryDate + def set_SchedDlvryDate(self, SchedDlvryDate): + self.SchedDlvryDate = SchedDlvryDate + def get_NonDlvryDays(self): + return self.NonDlvryDays + def set_NonDlvryDays(self, NonDlvryDays): + self.NonDlvryDays = NonDlvryDays + def get_NonExpeditedExceptions(self): + return self.NonExpeditedExceptions + def set_NonExpeditedExceptions(self, NonExpeditedExceptions): + self.NonExpeditedExceptions = NonExpeditedExceptions + def get_HFPU(self): + return self.HFPU + def set_HFPU(self, HFPU): + self.HFPU = HFPU + def has__content(self): + if ( + self.MailClass is not None or + self.NonExpeditedDestType is not 
None or + self.EAD is not None or + self.COT is not None or + self.SvcStdMsg is not None or + self.SvcStdDays is not None or + self.TotDaysDeliver is not None or + self.SchedDlvryDate is not None or + self.NonDlvryDays is not None or + self.NonExpeditedExceptions is not None or + self.HFPU is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonExpeditedType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'NonExpeditedType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonExpeditedType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NonExpeditedType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonExpeditedType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.NonExpeditedDestType is not None: + namespaceprefix_ = self.NonExpeditedDestType_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedDestType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonExpeditedDestType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NonExpeditedDestType, input_name='NonExpeditedDestType'), namespaceprefix_ , eol_)) + if self.EAD is not None: + namespaceprefix_ = self.EAD_nsprefix_ + ':' if (UseCapturedNS_ and self.EAD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEAD>%s%s' % (namespaceprefix_ , self.gds_format_date(self.EAD, input_name='EAD'), namespaceprefix_ , eol_)) + if self.COT is not None: + namespaceprefix_ = self.COT_nsprefix_ + ':' if (UseCapturedNS_ and self.COT_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCOT>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.COT, input_name='COT'), namespaceprefix_ , eol_)) + if self.SvcStdMsg is not None: + namespaceprefix_ = self.SvcStdMsg_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcStdMsg_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdMsg>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcStdMsg), input_name='SvcStdMsg')), namespaceprefix_ , eol_)) + if self.SvcStdDays is not None: + namespaceprefix_ = self.SvcStdDays_nsprefix_ + ':' 
if (UseCapturedNS_ and self.SvcStdDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SvcStdDays, input_name='SvcStdDays'), namespaceprefix_ , eol_)) + if self.TotDaysDeliver is not None: + namespaceprefix_ = self.TotDaysDeliver_nsprefix_ + ':' if (UseCapturedNS_ and self.TotDaysDeliver_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotDaysDeliver>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TotDaysDeliver, input_name='TotDaysDeliver'), namespaceprefix_ , eol_)) + if self.SchedDlvryDate is not None: + namespaceprefix_ = self.SchedDlvryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.SchedDlvryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSchedDlvryDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.SchedDlvryDate, input_name='SchedDlvryDate'), namespaceprefix_ , eol_)) + if self.NonDlvryDays is not None: + namespaceprefix_ = self.NonDlvryDays_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDlvryDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDlvryDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NonDlvryDays, input_name='NonDlvryDays'), namespaceprefix_ , eol_)) + if self.NonExpeditedExceptions is not None: + namespaceprefix_ = self.NonExpeditedExceptions_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedExceptions_nsprefix_) else '' + self.NonExpeditedExceptions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonExpeditedExceptions', pretty_print=pretty_print) + if self.HFPU is not None: + namespaceprefix_ = self.HFPU_nsprefix_ + ':' if (UseCapturedNS_ and self.HFPU_nsprefix_) else '' + self.HFPU.export(outfile, level, namespaceprefix_, namespacedef_='', name_='HFPU', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'NonExpeditedDestType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'NonExpeditedDestType') + ival_ = self.gds_validate_integer(ival_, node, 'NonExpeditedDestType') + self.NonExpeditedDestType = ival_ + self.NonExpeditedDestType_nsprefix_ = child_.prefix + elif nodeName_ == 'EAD': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.EAD = dval_ + self.EAD_nsprefix_ = child_.prefix + elif nodeName_ == 'COT' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'COT') + ival_ = self.gds_validate_integer(ival_, node, 'COT') + self.COT = ival_ + self.COT_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcStdMsg': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'SvcStdMsg') + value_ = self.gds_validate_string(value_, node, 'SvcStdMsg') + self.SvcStdMsg = value_ + self.SvcStdMsg_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcStdDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'SvcStdDays') + ival_ = self.gds_validate_integer(ival_, node, 'SvcStdDays') + self.SvcStdDays = ival_ + self.SvcStdDays_nsprefix_ = child_.prefix + elif nodeName_ == 'TotDaysDeliver' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'TotDaysDeliver') + ival_ = self.gds_validate_integer(ival_, node, 'TotDaysDeliver') + self.TotDaysDeliver = ival_ + self.TotDaysDeliver_nsprefix_ = child_.prefix + elif nodeName_ == 'SchedDlvryDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.SchedDlvryDate = dval_ + self.SchedDlvryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDlvryDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'NonDlvryDays') + ival_ = self.gds_validate_integer(ival_, node, 'NonDlvryDays') + self.NonDlvryDays = ival_ + self.NonDlvryDays_nsprefix_ = child_.prefix + elif nodeName_ == 'NonExpeditedExceptions': + obj_ = NonExpeditedExceptionsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.NonExpeditedExceptions = obj_ + obj_.original_tagname_ = 'NonExpeditedExceptions' + elif nodeName_ == 'HFPU': + obj_ = HFPUType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.HFPU = obj_ + obj_.original_tagname_ = 'HFPU' +# end class NonExpeditedType + + +class NonExpeditedExceptionsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SunHol=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SunHol = SunHol + self.SunHol_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, NonExpeditedExceptionsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if NonExpeditedExceptionsType.subclass: + return NonExpeditedExceptionsType.subclass(*args_, **kwargs_) + else: + return NonExpeditedExceptionsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SunHol(self): + return self.SunHol + def set_SunHol(self, SunHol): + self.SunHol = SunHol + def has__content(self): + if ( + self.SunHol is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonExpeditedExceptionsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'NonExpeditedExceptionsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='NonExpeditedExceptionsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NonExpeditedExceptionsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonExpeditedExceptionsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SunHol is not None: + namespaceprefix_ = self.SunHol_nsprefix_ + ':' if (UseCapturedNS_ and self.SunHol_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSunHol>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SunHol, input_name='SunHol'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SunHol' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'SunHol') + ival_ = self.gds_validate_integer(ival_, node, 'SunHol') + self.SunHol = ival_ + self.SunHol_nsprefix_ = child_.prefix +# end class NonExpeditedExceptionsType + + +class HFPUType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, EAD=None, COT=None, ServiceStandard=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(EAD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EAD, '%Y-%m-%d').date() + else: + initvalue_ = EAD + self.EAD = initvalue_ + self.EAD_nsprefix_ = None + self.COT = COT + self.COT_nsprefix_ = None + self.ServiceStandard = ServiceStandard + self.ServiceStandard_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, HFPUType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if HFPUType.subclass: + return HFPUType.subclass(*args_, **kwargs_) + else: + return HFPUType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EAD(self): + return self.EAD + def set_EAD(self, EAD): + self.EAD = EAD + def get_COT(self): + return self.COT + def set_COT(self, COT): + self.COT = COT + def get_ServiceStandard(self): + return self.ServiceStandard + def set_ServiceStandard(self, ServiceStandard): + self.ServiceStandard = ServiceStandard + def has__content(self): + if ( + self.EAD is not None or + self.COT is not None or + 
self.ServiceStandard is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPUType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('HFPUType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'HFPUType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HFPUType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HFPUType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HFPUType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPUType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.EAD is not None: + namespaceprefix_ = self.EAD_nsprefix_ + ':' if (UseCapturedNS_ and self.EAD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEAD>%s%s' % (namespaceprefix_ , self.gds_format_date(self.EAD, input_name='EAD'), namespaceprefix_ , eol_)) + if self.COT is not None: + namespaceprefix_ = self.COT_nsprefix_ + ':' if (UseCapturedNS_ and self.COT_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCOT>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.COT, input_name='COT'), namespaceprefix_ , eol_)) + if self.ServiceStandard is not None: + namespaceprefix_ = self.ServiceStandard_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceStandard_nsprefix_) else '' + self.ServiceStandard.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ServiceStandard', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'EAD': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.EAD = dval_ + self.EAD_nsprefix_ = child_.prefix + elif nodeName_ == 'COT' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'COT') + ival_ = self.gds_validate_integer(ival_, node, 'COT') + self.COT = ival_ + self.COT_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceStandard': + obj_ = ServiceStandardType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ServiceStandard = obj_ + obj_.original_tagname_ = 'ServiceStandard' +# end class 
HFPUType + + +class ServiceStandardType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SvcStdMsg=None, SvcStdDays=None, Location=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SvcStdMsg = SvcStdMsg + self.SvcStdMsg_nsprefix_ = None + self.SvcStdDays = SvcStdDays + self.SvcStdDays_nsprefix_ = None + self.Location = Location + self.Location_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ServiceStandardType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ServiceStandardType.subclass: + return ServiceStandardType.subclass(*args_, **kwargs_) + else: + return ServiceStandardType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SvcStdMsg(self): + return self.SvcStdMsg + def set_SvcStdMsg(self, SvcStdMsg): + self.SvcStdMsg = SvcStdMsg + def get_SvcStdDays(self): + return self.SvcStdDays + def set_SvcStdDays(self, SvcStdDays): + self.SvcStdDays = SvcStdDays + def get_Location(self): + return self.Location + def set_Location(self, Location): + self.Location = Location + def has__content(self): + if ( + self.SvcStdMsg is not None or + self.SvcStdDays is not None or + self.Location is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceStandardType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ServiceStandardType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ServiceStandardType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ServiceStandardType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ServiceStandardType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ServiceStandardType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceStandardType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SvcStdMsg is not None: + namespaceprefix_ = self.SvcStdMsg_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcStdMsg_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdMsg>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcStdMsg), input_name='SvcStdMsg')), namespaceprefix_ , eol_)) + if self.SvcStdDays is not None: + namespaceprefix_ = 
self.SvcStdDays_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcStdDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SvcStdDays, input_name='SvcStdDays'), namespaceprefix_ , eol_)) + if self.Location is not None: + namespaceprefix_ = self.Location_nsprefix_ + ':' if (UseCapturedNS_ and self.Location_nsprefix_) else '' + self.Location.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Location', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SvcStdMsg': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SvcStdMsg') + value_ = self.gds_validate_string(value_, node, 'SvcStdMsg') + self.SvcStdMsg = value_ + self.SvcStdMsg_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcStdDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'SvcStdDays') + ival_ = self.gds_validate_integer(ival_, node, 'SvcStdDays') + self.SvcStdDays = ival_ + self.SvcStdDays_nsprefix_ = child_.prefix + elif nodeName_ == 'Location': + obj_ = LocationType1.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Location = obj_ + obj_.original_tagname_ = 'Location' +# end class ServiceStandardType + + +class LocationType1(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, TotDaysDeliver=None, SchedDlvryDate=None, NonDlvryDays=None, RAUName=None, Street=None, ZIP=None, CloseTimes=None, NonExpeditedExceptions=None, City=None, State=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.TotDaysDeliver = TotDaysDeliver + self.TotDaysDeliver_nsprefix_ = None + if isinstance(SchedDlvryDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(SchedDlvryDate, '%Y-%m-%d').date() + else: + initvalue_ = SchedDlvryDate + self.SchedDlvryDate = initvalue_ + self.SchedDlvryDate_nsprefix_ = None + self.NonDlvryDays = NonDlvryDays + self.NonDlvryDays_nsprefix_ = None + self.RAUName = RAUName + self.RAUName_nsprefix_ = None + self.Street = Street + self.Street_nsprefix_ = None + self.ZIP = ZIP + self.ZIP_nsprefix_ = None + self.CloseTimes = CloseTimes + self.CloseTimes_nsprefix_ = None + self.NonExpeditedExceptions = NonExpeditedExceptions + self.NonExpeditedExceptions_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, LocationType1) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LocationType1.subclass: + return LocationType1.subclass(*args_, **kwargs_) + else: + return 
LocationType1(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TotDaysDeliver(self): + return self.TotDaysDeliver + def set_TotDaysDeliver(self, TotDaysDeliver): + self.TotDaysDeliver = TotDaysDeliver + def get_SchedDlvryDate(self): + return self.SchedDlvryDate + def set_SchedDlvryDate(self, SchedDlvryDate): + self.SchedDlvryDate = SchedDlvryDate + def get_NonDlvryDays(self): + return self.NonDlvryDays + def set_NonDlvryDays(self, NonDlvryDays): + self.NonDlvryDays = NonDlvryDays + def get_RAUName(self): + return self.RAUName + def set_RAUName(self, RAUName): + self.RAUName = RAUName + def get_Street(self): + return self.Street + def set_Street(self, Street): + self.Street = Street + def get_ZIP(self): + return self.ZIP + def set_ZIP(self, ZIP): + self.ZIP = ZIP + def get_CloseTimes(self): + return self.CloseTimes + def set_CloseTimes(self, CloseTimes): + self.CloseTimes = CloseTimes + def get_NonExpeditedExceptions(self): + return self.NonExpeditedExceptions + def set_NonExpeditedExceptions(self, NonExpeditedExceptions): + self.NonExpeditedExceptions = NonExpeditedExceptions + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def has__content(self): + if ( + self.TotDaysDeliver is not None or + self.SchedDlvryDate is not None or + self.NonDlvryDays is not None or + self.RAUName is not None or + self.Street is not None or + self.ZIP is not None or + self.CloseTimes is not None or + self.NonExpeditedExceptions is not None or + self.City is not None or + self.State is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType1', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationType1') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'LocationType1': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationType1') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationType1', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationType1'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType1', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TotDaysDeliver is not None: + namespaceprefix_ = self.TotDaysDeliver_nsprefix_ + ':' if (UseCapturedNS_ and self.TotDaysDeliver_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotDaysDeliver>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TotDaysDeliver, 
input_name='TotDaysDeliver'), namespaceprefix_ , eol_)) + if self.SchedDlvryDate is not None: + namespaceprefix_ = self.SchedDlvryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.SchedDlvryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSchedDlvryDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.SchedDlvryDate, input_name='SchedDlvryDate'), namespaceprefix_ , eol_)) + if self.NonDlvryDays is not None: + namespaceprefix_ = self.NonDlvryDays_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDlvryDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDlvryDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NonDlvryDays, input_name='NonDlvryDays'), namespaceprefix_ , eol_)) + if self.RAUName is not None: + namespaceprefix_ = self.RAUName_nsprefix_ + ':' if (UseCapturedNS_ and self.RAUName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRAUName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RAUName), input_name='RAUName')), namespaceprefix_ , eol_)) + if self.Street is not None: + namespaceprefix_ = self.Street_nsprefix_ + ':' if (UseCapturedNS_ and self.Street_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStreet>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Street), input_name='Street')), namespaceprefix_ , eol_)) + if self.ZIP is not None: + namespaceprefix_ = self.ZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ZIP, input_name='ZIP'), namespaceprefix_ , eol_)) + if self.CloseTimes is not None: + namespaceprefix_ = self.CloseTimes_nsprefix_ + ':' if (UseCapturedNS_ and self.CloseTimes_nsprefix_) else '' + self.CloseTimes.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CloseTimes', pretty_print=pretty_print) + if self.NonExpeditedExceptions is not None: + namespaceprefix_ = self.NonExpeditedExceptions_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedExceptions_nsprefix_) else '' + self.NonExpeditedExceptions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonExpeditedExceptions', pretty_print=pretty_print) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def 
_buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TotDaysDeliver' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'TotDaysDeliver') + ival_ = self.gds_validate_integer(ival_, node, 'TotDaysDeliver') + self.TotDaysDeliver = ival_ + self.TotDaysDeliver_nsprefix_ = child_.prefix + elif nodeName_ == 'SchedDlvryDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.SchedDlvryDate = dval_ + self.SchedDlvryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDlvryDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'NonDlvryDays') + ival_ = self.gds_validate_integer(ival_, node, 'NonDlvryDays') + self.NonDlvryDays = ival_ + self.NonDlvryDays_nsprefix_ = child_.prefix + elif nodeName_ == 'RAUName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RAUName') + value_ = self.gds_validate_string(value_, node, 'RAUName') + self.RAUName = value_ + self.RAUName_nsprefix_ = child_.prefix + elif nodeName_ == 'Street': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Street') + value_ = self.gds_validate_string(value_, node, 'Street') + self.Street = value_ + self.Street_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ZIP') + ival_ = self.gds_validate_integer(ival_, node, 'ZIP') + self.ZIP = ival_ + self.ZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'CloseTimes': + obj_ = CloseTimesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.CloseTimes = obj_ + obj_.original_tagname_ = 'CloseTimes' + elif nodeName_ == 'NonExpeditedExceptions': + obj_ = NonExpeditedExceptionsType2.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.NonExpeditedExceptions = obj_ + obj_.original_tagname_ = 'NonExpeditedExceptions' + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix +# end class LocationType1 + + +class CloseTimesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, M=None, Tu=None, W=None, Th=None, F=None, Sa=None, Su=None, H=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.M = M + self.M_nsprefix_ = None + self.Tu = Tu + self.Tu_nsprefix_ = None + self.W = W + self.W_nsprefix_ = None + self.Th = Th + self.Th_nsprefix_ = None + self.F = F + self.F_nsprefix_ = None + self.Sa = Sa + self.Sa_nsprefix_ = None + self.Su = Su + self.Su_nsprefix_ = None + self.H = H + self.H_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CloseTimesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CloseTimesType.subclass: + return CloseTimesType.subclass(*args_, **kwargs_) + else: + return CloseTimesType(*args_, 
**kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_M(self): + return self.M + def set_M(self, M): + self.M = M + def get_Tu(self): + return self.Tu + def set_Tu(self, Tu): + self.Tu = Tu + def get_W(self): + return self.W + def set_W(self, W): + self.W = W + def get_Th(self): + return self.Th + def set_Th(self, Th): + self.Th = Th + def get_F(self): + return self.F + def set_F(self, F): + self.F = F + def get_Sa(self): + return self.Sa + def set_Sa(self, Sa): + self.Sa = Sa + def get_Su(self): + return self.Su + def set_Su(self, Su): + self.Su = Su + def get_H(self): + return self.H + def set_H(self, H): + self.H = H + def has__content(self): + if ( + self.M is not None or + self.Tu is not None or + self.W is not None or + self.Th is not None or + self.F is not None or + self.Sa is not None or + self.Su is not None or + self.H is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CloseTimesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CloseTimesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CloseTimesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CloseTimesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CloseTimesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CloseTimesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CloseTimesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.M is not None: + namespaceprefix_ = self.M_nsprefix_ + ':' if (UseCapturedNS_ and self.M_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sM>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.M, input_name='M'), namespaceprefix_ , eol_)) + if self.Tu is not None: + namespaceprefix_ = self.Tu_nsprefix_ + ':' if (UseCapturedNS_ and self.Tu_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTu>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Tu, input_name='Tu'), namespaceprefix_ , eol_)) + if self.W is not None: + namespaceprefix_ = self.W_nsprefix_ + ':' if (UseCapturedNS_ and self.W_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sW>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.W, input_name='W'), namespaceprefix_ , eol_)) + if self.Th is not None: + namespaceprefix_ = self.Th_nsprefix_ + ':' if (UseCapturedNS_ and self.Th_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTh>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Th, 
input_name='Th'), namespaceprefix_ , eol_)) + if self.F is not None: + namespaceprefix_ = self.F_nsprefix_ + ':' if (UseCapturedNS_ and self.F_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sF>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.F, input_name='F'), namespaceprefix_ , eol_)) + if self.Sa is not None: + namespaceprefix_ = self.Sa_nsprefix_ + ':' if (UseCapturedNS_ and self.Sa_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSa>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Sa, input_name='Sa'), namespaceprefix_ , eol_)) + if self.Su is not None: + namespaceprefix_ = self.Su_nsprefix_ + ':' if (UseCapturedNS_ and self.Su_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSu>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Su, input_name='Su'), namespaceprefix_ , eol_)) + if self.H is not None: + namespaceprefix_ = self.H_nsprefix_ + ':' if (UseCapturedNS_ and self.H_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sH>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.H, input_name='H'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'M' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'M') + ival_ = self.gds_validate_integer(ival_, node, 'M') + self.M = ival_ + self.M_nsprefix_ = child_.prefix + elif nodeName_ == 'Tu' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Tu') + ival_ = self.gds_validate_integer(ival_, node, 'Tu') + self.Tu = ival_ + self.Tu_nsprefix_ = child_.prefix + elif nodeName_ == 'W' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'W') + ival_ = self.gds_validate_integer(ival_, node, 'W') + self.W = ival_ + self.W_nsprefix_ = child_.prefix + elif nodeName_ == 'Th' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Th') + ival_ = self.gds_validate_integer(ival_, node, 'Th') + self.Th = ival_ + self.Th_nsprefix_ = child_.prefix + elif nodeName_ == 'F' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'F') + ival_ = self.gds_validate_integer(ival_, node, 'F') + self.F = ival_ + self.F_nsprefix_ = child_.prefix + elif nodeName_ == 'Sa' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Sa') + ival_ = self.gds_validate_integer(ival_, node, 'Sa') + self.Sa = ival_ + self.Sa_nsprefix_ = child_.prefix + elif nodeName_ == 'Su' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Su') + ival_ = self.gds_validate_integer(ival_, node, 'Su') + self.Su = ival_ + self.Su_nsprefix_ = child_.prefix + elif nodeName_ == 'H' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'H') + ival_ = self.gds_validate_integer(ival_, node, 'H') + self.H = ival_ + 
self.H_nsprefix_ = child_.prefix +# end class CloseTimesType + + +class NonExpeditedExceptionsType2(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SunHol=None, Closed=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SunHol = SunHol + self.SunHol_nsprefix_ = None + self.Closed = Closed + self.Closed_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, NonExpeditedExceptionsType2) + if subclass is not None: + return subclass(*args_, **kwargs_) + if NonExpeditedExceptionsType2.subclass: + return NonExpeditedExceptionsType2.subclass(*args_, **kwargs_) + else: + return NonExpeditedExceptionsType2(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SunHol(self): + return self.SunHol + def set_SunHol(self, SunHol): + self.SunHol = SunHol + def get_Closed(self): + return self.Closed + def set_Closed(self, Closed): + self.Closed = Closed + def has__content(self): + if ( + self.SunHol is not None or + self.Closed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType2', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonExpeditedExceptionsType2') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'NonExpeditedExceptionsType2': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonExpeditedExceptionsType2') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NonExpeditedExceptionsType2', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonExpeditedExceptionsType2'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType2', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SunHol is not None: + namespaceprefix_ = self.SunHol_nsprefix_ + ':' if (UseCapturedNS_ and self.SunHol_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSunHol>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SunHol, input_name='SunHol'), namespaceprefix_ , eol_)) + if self.Closed is not None: + namespaceprefix_ = self.Closed_nsprefix_ + ':' if (UseCapturedNS_ and self.Closed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClosed>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Closed, 
input_name='Closed'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SunHol' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'SunHol') + ival_ = self.gds_validate_integer(ival_, node, 'SunHol') + self.SunHol = ival_ + self.SunHol_nsprefix_ = child_.prefix + elif nodeName_ == 'Closed' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Closed') + ival_ = self.gds_validate_integer(ival_, node, 'Closed') + self.Closed = ival_ + self.Closed_nsprefix_ = child_.prefix +# end class NonExpeditedExceptionsType2 + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsResponse' + rootClass = SDCGetLocationsResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsResponse' + rootClass = SDCGetLocationsResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsResponse' + rootClass = SDCGetLocationsResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsResponse' + rootClass = SDCGetLocationsResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from sdc_get_locations_response import *\n\n') + sys.stdout.write('import sdc_get_locations_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CloseTimesType", + "CommitmentType", + "ExpeditedType", + "HFPUType", + "LocationType", + "LocationType1", + "NonExpeditedExceptionsType", + "NonExpeditedExceptionsType2", + "NonExpeditedType", + "SDCGetLocationsResponse", + "ServiceStandardType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/standard_b_request.py b/modules/connectors/usps/karrio/schemas/usps/standard_b_request.py new file mode 100644 index 0000000000..c794b77599 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/standard_b_request.py @@ -0,0 +1,1386 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:46 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/standard_b_request.py') +# +# Command line arguments: +# ./schemas/StandardBRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/standard_b_request.py" ./schemas/StandardBRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write('    ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class StandardBRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, DestinationType=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.DestinationType = DestinationType + self.DestinationType_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, StandardBRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if StandardBRequest.subclass: + return StandardBRequest.subclass(*args_, **kwargs_) + else: + return StandardBRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_DestinationType(self): + return self.DestinationType + def set_DestinationType(self, DestinationType): + self.DestinationType = DestinationType + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.DestinationType is not None or + self.ClientType is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StandardBRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('StandardBRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'StandardBRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StandardBRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StandardBRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StandardBRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StandardBRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s</%sOriginZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s</%sDestinationZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.DestinationType is not None: + namespaceprefix_ = self.DestinationType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationType>%s</%sDestinationType>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationType, input_name='DestinationType'), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientType>%s</%sClientType>%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value =
find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationType') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationType') + self.DestinationType = ival_ + self.DestinationType_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class StandardBRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from standard_b_request import *\n\n') + sys.stdout.write('import standard_b_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "StandardBRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/standard_b_response.py b/modules/connectors/usps/karrio/schemas/usps/standard_b_response.py new file mode 100644 index 0000000000..db725f6d12 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/standard_b_response.py @@ -0,0 +1,1396 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:46 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/standard_b_response.py') +# +# Command line arguments: +# ./schemas/StandardBResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/standard_b_response.py" ./schemas/StandardBResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', '&quot;') + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class StandardBResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZip=None, DestinationZip=None, Days=None, Message=None, EffectiveAcceptanceDate=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Days = Days + self.Days_nsprefix_ = None + self.Message = Message + self.Message_nsprefix_ = None + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, StandardBResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if StandardBResponse.subclass: + return StandardBResponse.subclass(*args_, **kwargs_) + else: + return StandardBResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Days(self): + return self.Days + def set_Days(self, Days): + self.Days = Days + def get_Message(self): + return self.Message + def set_Message(self, Message): + self.Message = Message + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Days is not None or + self.Message is not None or + self.EffectiveAcceptanceDate is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='StandardBResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('StandardBResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'StandardBResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StandardBResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StandardBResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StandardBResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StandardBResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s</%sOriginZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s</%sDestinationZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.Days is not None: + namespaceprefix_ = self.Days_nsprefix_ + ':' if (UseCapturedNS_ and self.Days_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDays>%s</%sDays>%s' % (namespaceprefix_ , self.gds_format_integer(self.Days, input_name='Days'), namespaceprefix_ , eol_)) + if self.Message is not None: + namespaceprefix_ = self.Message_nsprefix_ + ':' if (UseCapturedNS_ and self.Message_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMessage>%s</%sMessage>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Message), input_name='Message')), namespaceprefix_ , eol_)) + if self.EffectiveAcceptanceDate is not None: + namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEffectiveAcceptanceDate>%s</%sEffectiveAcceptanceDate>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EffectiveAcceptanceDate), input_name='EffectiveAcceptanceDate')), namespaceprefix_ , eol_)) + if self.ScheduledDeliveryDate is not None: + namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sScheduledDeliveryDate>%s</%sScheduledDeliveryDate>%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Days' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Days') + ival_ = self.gds_validate_integer(ival_, node, 'Days') + self.Days = ival_ + self.Days_nsprefix_ = child_.prefix + elif nodeName_ == 'Message': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Message') + value_ = self.gds_validate_string(value_, node, 'Message') + self.Message = value_ + self.Message_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EffectiveAcceptanceDate') + value_ = self.gds_validate_string(value_, node, 'EffectiveAcceptanceDate') + self.EffectiveAcceptanceDate = value_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ScheduledDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate') + self.ScheduledDeliveryDate = value_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix +# end class StandardBResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from standard_b_response import *\n\n') + sys.stdout.write('import standard_b_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "StandardBResponse" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/track_field_request.py b/modules/connectors/usps/karrio/schemas/usps/track_field_request.py new file mode 100644 index 0000000000..5756bbd9de --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/track_field_request.py @@ -0,0 +1,1520 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:46 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/track_field_request.py') +# +# Command line arguments: +# ./schemas/TrackFieldRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/track_field_request.py" ./schemas/TrackFieldRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
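The gds_format_* helpers defined above normalize Python values into the lexical forms that xs:float, xs:boolean, and xs:dateTime expect before they are written into the request XML. A minimal standalone sketch of that behavior (reimplemented here for illustration only, covering the microsecond-free dateTime case; this snippet is not part of the generated module):

import datetime

def format_float(value):
    # '%.15f', then strip trailing zeros but keep one digit after the dot
    text = ('%.15f' % float(value)).rstrip('0')
    return text + '0' if text.endswith('.') else text

def format_boolean(value):
    # Python booleans become the lowercase XML literals 'true' / 'false'
    return ('%s' % value).lower()

def format_datetime(value):
    # xs:dateTime without microseconds; a zero UTC offset is rendered as 'Z'
    text = value.strftime('%Y-%m-%dT%H:%M:%S')
    offset = value.utcoffset()
    if offset is None:
        return text
    total = offset.days * 86400 + offset.seconds
    if total == 0:
        return text + 'Z'
    sign, total = ('-', -total) if total < 0 else ('+', total)
    return text + '%s%02d:%02d' % (sign, total // 3600, (total % 3600) // 60)

assert format_float(3) == '3.0'
assert format_float(2.50) == '2.5'
assert format_boolean(True) == 'true'
assert format_datetime(
    datetime.datetime(2024, 4, 3, 21, 8, 46, tzinfo=datetime.timezone.utc)
) == '2024-04-03T21:08:46Z'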
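gds_validate_simple_patterns, also defined above, treats its argument as groups of alternatives: the target must fully match at least one pattern from every inner list, or the check fails. A short sketch of that contract (the ZIP-style patterns are illustrative, not taken from the USPS schemas):

import re

def validate_simple_patterns(patterns, target):
    # True only if, for every group of alternatives, at least one
    # pattern matches the entire target string.
    target = str(target)
    for alternatives in patterns:
        matched = False
        for pattern in alternatives:
            mo = re.search(pattern, target)
            if mo is not None and len(mo.group(0)) == len(target):
                matched = True
                break
        if not matched:
            return False
    return True

zip_patterns = [[r'\d{5}', r'\d{5}-\d{4}']]   # illustrative constraint
assert validate_simple_patterns(zip_patterns, '20770')
assert validate_simple_patterns(zip_patterns, '20770-0001')
assert not validate_simple_patterns(zip_patterns, '2077')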
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class TrackFieldRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Revision=None, ClientIp=None, SourceId=None, TrackID=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ClientIp = ClientIp + self.ClientIp_nsprefix_ = None + self.SourceId = SourceId + self.SourceId_nsprefix_ = None + if TrackID is None: + self.TrackID = [] + else: + self.TrackID = TrackID + self.TrackID_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackFieldRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackFieldRequest.subclass: + return TrackFieldRequest.subclass(*args_, **kwargs_) + else: + return TrackFieldRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ClientIp(self): + return self.ClientIp + def set_ClientIp(self, ClientIp): + self.ClientIp = ClientIp + def get_SourceId(self): + return self.SourceId + def set_SourceId(self, SourceId): + self.SourceId = SourceId + def get_TrackID(self): + return self.TrackID + def set_TrackID(self, TrackID): + self.TrackID = TrackID + def add_TrackID(self, value): + self.TrackID.append(value) + def insert_TrackID_at(self, index, value): + self.TrackID.insert(index, value) + def replace_TrackID_at(self, index, value): + self.TrackID[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Revision is not None or + self.ClientIp is not None or + self.SourceId is not None or + self.TrackID + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackFieldRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackFieldRequest') 
+ if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackFieldRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackFieldRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackFieldRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackFieldRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackFieldRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.ClientIp is not None: + namespaceprefix_ = self.ClientIp_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientIp_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientIp>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClientIp), input_name='ClientIp')), namespaceprefix_ , eol_)) + if self.SourceId is not None: + namespaceprefix_ = self.SourceId_nsprefix_ + ':' if (UseCapturedNS_ and self.SourceId_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSourceId>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SourceId), input_name='SourceId')), namespaceprefix_ , eol_)) + for TrackID_ in self.TrackID: + namespaceprefix_ = self.TrackID_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackID_nsprefix_) else '' + TrackID_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackID', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = 
find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientIp': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClientIp') + value_ = self.gds_validate_string(value_, node, 'ClientIp') + self.ClientIp = value_ + self.ClientIp_nsprefix_ = child_.prefix + elif nodeName_ == 'SourceId': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SourceId') + value_ = self.gds_validate_string(value_, node, 'SourceId') + self.SourceId = value_ + self.SourceId_nsprefix_ = child_.prefix + elif nodeName_ == 'TrackID': + obj_ = TrackIDType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackID.append(obj_) + obj_.original_tagname_ = 'TrackID' +# end class TrackFieldRequest + + +class TrackIDType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, DestinationZipCode=None, MailingDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.DestinationZipCode = DestinationZipCode + self.DestinationZipCode_nsprefix_ = None + if isinstance(MailingDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(MailingDate, '%Y-%m-%d').date() + else: + initvalue_ = MailingDate + self.MailingDate = initvalue_ + self.MailingDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackIDType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackIDType.subclass: + return TrackIDType.subclass(*args_, **kwargs_) + else: + return TrackIDType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_DestinationZipCode(self): + return self.DestinationZipCode + def set_DestinationZipCode(self, DestinationZipCode): + self.DestinationZipCode = DestinationZipCode + def get_MailingDate(self): + return self.MailingDate + def set_MailingDate(self, MailingDate): + self.MailingDate = MailingDate + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.DestinationZipCode is not None or + self.MailingDate is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackIDType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackIDType': + name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackIDType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackIDType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackIDType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.DestinationZipCode is not None: + namespaceprefix_ = self.DestinationZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZipCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZipCode, input_name='DestinationZipCode'), namespaceprefix_ , eol_)) + if self.MailingDate is not None: + namespaceprefix_ = self.MailingDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MailingDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailingDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.MailingDate, input_name='MailingDate'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'DestinationZipCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZipCode') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZipCode') + self.DestinationZipCode = ival_ + self.DestinationZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'MailingDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.MailingDate = dval_ + self.MailingDate_nsprefix_ = child_.prefix +# end class TrackIDType + + +# +# End data representation classes. 
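A short usage sketch for the TrackFieldRequest and TrackIDType classes above. The import path assumes the package layout from this patch (karrio/schemas/usps/track_field_request.py); the credentials, tracking number, and ZIP code are placeholders, and parseString is the module-level helper defined further below:

import io
import karrio.schemas.usps.track_field_request as track_field_request  # assumed path

request = track_field_request.TrackFieldRequest(
    USERID="XXXXXXXX",            # placeholder credentials
    Revision="1",
    ClientIp="127.0.0.1",
    SourceId="karrio",
    TrackID=[
        track_field_request.TrackIDType(
            ID="9400100000000000000000",   # illustrative tracking number
            DestinationZipCode=20770,
            MailingDate="2024-04-03",      # converted to a date in __init__
        )
    ],
)

buffer = io.StringIO()
request.export(buffer, 0, name_="TrackFieldRequest")
xml_payload = buffer.getvalue()

# Round-trip through the module-level parser defined below.
parsed = track_field_request.parseString(xml_payload, silence=True)
assert parsed.get_TrackID()[0].get_DestinationZipCode() == 20770

Passing MailingDate as a 'YYYY-MM-DD' string works because TrackIDType.__init__ above converts string values to a datetime.date before they are exported.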
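The factory() methods, getSubclassFromModule_, and the CurrentSubclassModule_ global exist so that hand-written subclasses can be substituted at build time without editing this generated file. A hedged sketch of that wiring, relying on the '<ClassName>Sub' naming convention the lookup uses (the subclass and its helper method are made up for illustration):

import sys
import karrio.schemas.usps.track_field_request as supermod  # assumed path

class TrackFieldRequestSub(supermod.TrackFieldRequest):
    # getSubclassFromModule_ resolves subclasses by the '<Name>Sub' convention.
    def tracking_numbers(self):
        return [track_id.get_ID() for track_id in self.get_TrackID()]

# Redirect every factory() call in the generated module to this module's
# '*Sub' classes, so parse()/parseString() build TrackFieldRequestSub objects.
supermod.CurrentSubclassModule_ = sys.modules[__name__]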
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. 
If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from track_field_request import *\n\n') + sys.stdout.write('import track_field_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "TrackFieldRequest", + "TrackIDType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/track_request.py b/modules/connectors/usps/karrio/schemas/usps/track_request.py new file mode 100644 index 0000000000..09a3ec80c7 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/track_request.py @@ -0,0 +1,1432 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:46 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/track_request.py') +# +# Command line arguments: +# ./schemas/TrackRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/track_request.py" ./schemas/TrackRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
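In this rendering of the patch, the replacement targets inside quote_xml_aux and quote_attrib (both here and in track_field_request.py above) appear as no-op substitutions, presumably because the XML entity literals were collapsed when the diff was captured; the helpers are meant to escape markup characters outside CDATA sections and to choose attribute quoting. A standalone sketch of that intent (an assumption based on standard generateDS output, not on text visible in this capture):

def escape_markup(text):
    # Standard XML entity escaping for text outside CDATA blocks;
    # '&' is replaced first so it is not double-escaped.
    return (
        text.replace('&', '&amp;')
            .replace('<', '&lt;')
            .replace('>', '&gt;')
    )

assert escape_markup('Tracking <ID> & status') == 'Tracking &lt;ID&gt; &amp; status'

# quote_attrib additionally wraps the escaped value in double quotes,
# falling back to single quotes when the value itself contains '"'.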
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class TrackRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, TrackID=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + if TrackID is None: + self.TrackID = [] + else: + self.TrackID = TrackID + self.TrackID_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackRequest.subclass: + return TrackRequest.subclass(*args_, **kwargs_) + else: + return TrackRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackID(self): + return self.TrackID + def set_TrackID(self, TrackID): + self.TrackID = TrackID + def add_TrackID(self, value): + self.TrackID.append(value) + def insert_TrackID_at(self, index, value): + self.TrackID.insert(index, value) + def replace_TrackID_at(self, index, value): + self.TrackID[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.TrackID + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='TrackRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for TrackID_ in self.TrackID: + namespaceprefix_ = self.TrackID_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackID_nsprefix_) else '' + TrackID_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackID', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackID': + obj_ = TrackIDType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackID.append(obj_) + obj_.original_tagname_ = 'TrackID' +# end class TrackRequest + + +class TrackIDType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackIDType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackIDType.subclass: + return TrackIDType.subclass(*args_, **kwargs_) + else: + return TrackIDType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def 
has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackIDType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackIDType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackIDType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackIDType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class TrackIDType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from track_request import *\n\n') + sys.stdout.write('import track_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "TrackIDType", + "TrackRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/track_response.py b/modules/connectors/usps/karrio/schemas/usps/track_response.py new file mode 100644 index 0000000000..5c622f8630 --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/track_response.py @@ -0,0 +1,3349 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:47 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/track_response.py') +# +# Command line arguments: +# ./schemas/TrackResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/track_response.py" ./schemas/TrackResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class TrackResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, TrackInfo=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if TrackInfo is None: + self.TrackInfo = [] + else: + self.TrackInfo = TrackInfo + self.TrackInfo_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackResponse.subclass: + return TrackResponse.subclass(*args_, **kwargs_) + else: + return TrackResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackInfo(self): + return self.TrackInfo + def set_TrackInfo(self, TrackInfo): + self.TrackInfo = TrackInfo + def add_TrackInfo(self, value): + self.TrackInfo.append(value) + def insert_TrackInfo_at(self, index, value): + self.TrackInfo.insert(index, value) + def replace_TrackInfo_at(self, index, value): + self.TrackInfo[index] = value + def has__content(self): + if ( + self.TrackInfo + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackResponse'): + 
pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for TrackInfo_ in self.TrackInfo: + namespaceprefix_ = self.TrackInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackInfo_nsprefix_) else '' + TrackInfo_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackInfo', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackInfo': + obj_ = TrackInfoType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackInfo.append(obj_) + obj_.original_tagname_ = 'TrackInfo' +# end class TrackResponse + + +class TrackInfoType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, AdditionalInfo=None, ADPScripting=None, ARCHDATA=None, ArchiveRestoreInfo=None, AssociatedLabel=None, Class=None, ClassOfMailCode=None, DeliveryNotificationDate=None, DestinationCity=None, DestinationCountryCode=None, DestinationState=None, DestinationZip=None, EditedLabelID=None, EmailEnabled=None, ExpectedDeliveryDate=None, ExpectedDeliveryTime=None, GuaranteedDeliveryDate=None, GuaranteedDeliveryTime=None, GuaranteedDetails=None, KahalaIndicator=None, MailTypeCode=None, MPDATE=None, MPSUFFIX=None, OriginCity=None, OriginCountryCode=None, OriginState=None, OriginZip=None, PodEnabled=None, PredictedDeliveryDate=None, PredictedDeliveryTime=None, PDWStart=None, PDWEnd=None, RelatedRRID=None, RestoreEnabled=None, RRAMenabled=None, RreEnabled=None, Service=None, ServiceTypeCode=None, Status=None, StatusCategory=None, StatusSummary=None, TABLECODE=None, TpodEnabled=None, ValueofArticle=None, EnabledNotificationRequests=None, TrackSummary=None, TrackDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.AdditionalInfo = AdditionalInfo + self.AdditionalInfo_nsprefix_ = None + self.ADPScripting = ADPScripting + self.ADPScripting_nsprefix_ = None + self.ARCHDATA = ARCHDATA + self.ARCHDATA_nsprefix_ = None + self.ArchiveRestoreInfo = ArchiveRestoreInfo + self.ArchiveRestoreInfo_nsprefix_ = None + self.AssociatedLabel = AssociatedLabel + self.AssociatedLabel_nsprefix_ = None + self.Class = Class + self.Class_nsprefix_ = None + self.ClassOfMailCode = ClassOfMailCode + self.ClassOfMailCode_nsprefix_ = None + self.DeliveryNotificationDate = DeliveryNotificationDate + self.DeliveryNotificationDate_nsprefix_ = None + self.DestinationCity = DestinationCity + self.DestinationCity_nsprefix_ = None + self.DestinationCountryCode = DestinationCountryCode + self.DestinationCountryCode_nsprefix_ = None + self.DestinationState = DestinationState + 
self.DestinationState_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.EditedLabelID = EditedLabelID + self.EditedLabelID_nsprefix_ = None + self.EmailEnabled = EmailEnabled + self.EmailEnabled_nsprefix_ = None + self.ExpectedDeliveryDate = ExpectedDeliveryDate + self.ExpectedDeliveryDate_nsprefix_ = None + self.ExpectedDeliveryTime = ExpectedDeliveryTime + self.ExpectedDeliveryTime_nsprefix_ = None + self.GuaranteedDeliveryDate = GuaranteedDeliveryDate + self.GuaranteedDeliveryDate_nsprefix_ = None + self.GuaranteedDeliveryTime = GuaranteedDeliveryTime + self.GuaranteedDeliveryTime_nsprefix_ = None + self.GuaranteedDetails = GuaranteedDetails + self.GuaranteedDetails_nsprefix_ = None + self.KahalaIndicator = KahalaIndicator + self.KahalaIndicator_nsprefix_ = None + self.MailTypeCode = MailTypeCode + self.MailTypeCode_nsprefix_ = None + self.MPDATE = MPDATE + self.MPDATE_nsprefix_ = None + self.MPSUFFIX = MPSUFFIX + self.MPSUFFIX_nsprefix_ = None + self.OriginCity = OriginCity + self.OriginCity_nsprefix_ = None + self.OriginCountryCode = OriginCountryCode + self.OriginCountryCode_nsprefix_ = None + self.OriginState = OriginState + self.OriginState_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.PodEnabled = PodEnabled + self.PodEnabled_nsprefix_ = None + self.PredictedDeliveryDate = PredictedDeliveryDate + self.PredictedDeliveryDate_nsprefix_ = None + self.PredictedDeliveryTime = PredictedDeliveryTime + self.PredictedDeliveryTime_nsprefix_ = None + self.PDWStart = PDWStart + self.PDWStart_nsprefix_ = None + self.PDWEnd = PDWEnd + self.PDWEnd_nsprefix_ = None + self.RelatedRRID = RelatedRRID + self.RelatedRRID_nsprefix_ = None + self.RestoreEnabled = RestoreEnabled + self.RestoreEnabled_nsprefix_ = None + self.RRAMenabled = RRAMenabled + self.RRAMenabled_nsprefix_ = None + self.RreEnabled = RreEnabled + self.RreEnabled_nsprefix_ = None + self.Service = Service + self.Service_nsprefix_ = None + self.ServiceTypeCode = ServiceTypeCode + self.ServiceTypeCode_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + self.StatusCategory = StatusCategory + self.StatusCategory_nsprefix_ = None + self.StatusSummary = StatusSummary + self.StatusSummary_nsprefix_ = None + self.TABLECODE = TABLECODE + self.TABLECODE_nsprefix_ = None + self.TpodEnabled = TpodEnabled + self.TpodEnabled_nsprefix_ = None + self.ValueofArticle = ValueofArticle + self.ValueofArticle_nsprefix_ = None + self.EnabledNotificationRequests = EnabledNotificationRequests + self.EnabledNotificationRequests_nsprefix_ = None + self.TrackSummary = TrackSummary + self.TrackSummary_nsprefix_ = None + if TrackDetail is None: + self.TrackDetail = [] + else: + self.TrackDetail = TrackDetail + self.TrackDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackInfoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackInfoType.subclass: + return TrackInfoType.subclass(*args_, **kwargs_) + else: + return TrackInfoType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_AdditionalInfo(self): + return self.AdditionalInfo + def set_AdditionalInfo(self, AdditionalInfo): + self.AdditionalInfo = AdditionalInfo + def get_ADPScripting(self): + return self.ADPScripting + def 
set_ADPScripting(self, ADPScripting): + self.ADPScripting = ADPScripting + def get_ARCHDATA(self): + return self.ARCHDATA + def set_ARCHDATA(self, ARCHDATA): + self.ARCHDATA = ARCHDATA + def get_ArchiveRestoreInfo(self): + return self.ArchiveRestoreInfo + def set_ArchiveRestoreInfo(self, ArchiveRestoreInfo): + self.ArchiveRestoreInfo = ArchiveRestoreInfo + def get_AssociatedLabel(self): + return self.AssociatedLabel + def set_AssociatedLabel(self, AssociatedLabel): + self.AssociatedLabel = AssociatedLabel + def get_Class(self): + return self.Class + def set_Class(self, Class): + self.Class = Class + def get_ClassOfMailCode(self): + return self.ClassOfMailCode + def set_ClassOfMailCode(self, ClassOfMailCode): + self.ClassOfMailCode = ClassOfMailCode + def get_DeliveryNotificationDate(self): + return self.DeliveryNotificationDate + def set_DeliveryNotificationDate(self, DeliveryNotificationDate): + self.DeliveryNotificationDate = DeliveryNotificationDate + def get_DestinationCity(self): + return self.DestinationCity + def set_DestinationCity(self, DestinationCity): + self.DestinationCity = DestinationCity + def get_DestinationCountryCode(self): + return self.DestinationCountryCode + def set_DestinationCountryCode(self, DestinationCountryCode): + self.DestinationCountryCode = DestinationCountryCode + def get_DestinationState(self): + return self.DestinationState + def set_DestinationState(self, DestinationState): + self.DestinationState = DestinationState + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_EditedLabelID(self): + return self.EditedLabelID + def set_EditedLabelID(self, EditedLabelID): + self.EditedLabelID = EditedLabelID + def get_EmailEnabled(self): + return self.EmailEnabled + def set_EmailEnabled(self, EmailEnabled): + self.EmailEnabled = EmailEnabled + def get_ExpectedDeliveryDate(self): + return self.ExpectedDeliveryDate + def set_ExpectedDeliveryDate(self, ExpectedDeliveryDate): + self.ExpectedDeliveryDate = ExpectedDeliveryDate + def get_ExpectedDeliveryTime(self): + return self.ExpectedDeliveryTime + def set_ExpectedDeliveryTime(self, ExpectedDeliveryTime): + self.ExpectedDeliveryTime = ExpectedDeliveryTime + def get_GuaranteedDeliveryDate(self): + return self.GuaranteedDeliveryDate + def set_GuaranteedDeliveryDate(self, GuaranteedDeliveryDate): + self.GuaranteedDeliveryDate = GuaranteedDeliveryDate + def get_GuaranteedDeliveryTime(self): + return self.GuaranteedDeliveryTime + def set_GuaranteedDeliveryTime(self, GuaranteedDeliveryTime): + self.GuaranteedDeliveryTime = GuaranteedDeliveryTime + def get_GuaranteedDetails(self): + return self.GuaranteedDetails + def set_GuaranteedDetails(self, GuaranteedDetails): + self.GuaranteedDetails = GuaranteedDetails + def get_KahalaIndicator(self): + return self.KahalaIndicator + def set_KahalaIndicator(self, KahalaIndicator): + self.KahalaIndicator = KahalaIndicator + def get_MailTypeCode(self): + return self.MailTypeCode + def set_MailTypeCode(self, MailTypeCode): + self.MailTypeCode = MailTypeCode + def get_MPDATE(self): + return self.MPDATE + def set_MPDATE(self, MPDATE): + self.MPDATE = MPDATE + def get_MPSUFFIX(self): + return self.MPSUFFIX + def set_MPSUFFIX(self, MPSUFFIX): + self.MPSUFFIX = MPSUFFIX + def get_OriginCity(self): + return self.OriginCity + def set_OriginCity(self, OriginCity): + self.OriginCity = OriginCity + def get_OriginCountryCode(self): + return self.OriginCountryCode + def 
set_OriginCountryCode(self, OriginCountryCode): + self.OriginCountryCode = OriginCountryCode + def get_OriginState(self): + return self.OriginState + def set_OriginState(self, OriginState): + self.OriginState = OriginState + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_PodEnabled(self): + return self.PodEnabled + def set_PodEnabled(self, PodEnabled): + self.PodEnabled = PodEnabled + def get_PredictedDeliveryDate(self): + return self.PredictedDeliveryDate + def set_PredictedDeliveryDate(self, PredictedDeliveryDate): + self.PredictedDeliveryDate = PredictedDeliveryDate + def get_PredictedDeliveryTime(self): + return self.PredictedDeliveryTime + def set_PredictedDeliveryTime(self, PredictedDeliveryTime): + self.PredictedDeliveryTime = PredictedDeliveryTime + def get_PDWStart(self): + return self.PDWStart + def set_PDWStart(self, PDWStart): + self.PDWStart = PDWStart + def get_PDWEnd(self): + return self.PDWEnd + def set_PDWEnd(self, PDWEnd): + self.PDWEnd = PDWEnd + def get_RelatedRRID(self): + return self.RelatedRRID + def set_RelatedRRID(self, RelatedRRID): + self.RelatedRRID = RelatedRRID + def get_RestoreEnabled(self): + return self.RestoreEnabled + def set_RestoreEnabled(self, RestoreEnabled): + self.RestoreEnabled = RestoreEnabled + def get_RRAMenabled(self): + return self.RRAMenabled + def set_RRAMenabled(self, RRAMenabled): + self.RRAMenabled = RRAMenabled + def get_RreEnabled(self): + return self.RreEnabled + def set_RreEnabled(self, RreEnabled): + self.RreEnabled = RreEnabled + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def get_ServiceTypeCode(self): + return self.ServiceTypeCode + def set_ServiceTypeCode(self, ServiceTypeCode): + self.ServiceTypeCode = ServiceTypeCode + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def get_StatusCategory(self): + return self.StatusCategory + def set_StatusCategory(self, StatusCategory): + self.StatusCategory = StatusCategory + def get_StatusSummary(self): + return self.StatusSummary + def set_StatusSummary(self, StatusSummary): + self.StatusSummary = StatusSummary + def get_TABLECODE(self): + return self.TABLECODE + def set_TABLECODE(self, TABLECODE): + self.TABLECODE = TABLECODE + def get_TpodEnabled(self): + return self.TpodEnabled + def set_TpodEnabled(self, TpodEnabled): + self.TpodEnabled = TpodEnabled + def get_ValueofArticle(self): + return self.ValueofArticle + def set_ValueofArticle(self, ValueofArticle): + self.ValueofArticle = ValueofArticle + def get_EnabledNotificationRequests(self): + return self.EnabledNotificationRequests + def set_EnabledNotificationRequests(self, EnabledNotificationRequests): + self.EnabledNotificationRequests = EnabledNotificationRequests + def get_TrackSummary(self): + return self.TrackSummary + def set_TrackSummary(self, TrackSummary): + self.TrackSummary = TrackSummary + def get_TrackDetail(self): + return self.TrackDetail + def set_TrackDetail(self, TrackDetail): + self.TrackDetail = TrackDetail + def add_TrackDetail(self, value): + self.TrackDetail.append(value) + def insert_TrackDetail_at(self, index, value): + self.TrackDetail.insert(index, value) + def replace_TrackDetail_at(self, index, value): + self.TrackDetail[index] = value + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.AdditionalInfo is not None or + self.ADPScripting is not None or + 
self.ARCHDATA is not None or + self.ArchiveRestoreInfo is not None or + self.AssociatedLabel is not None or + self.Class is not None or + self.ClassOfMailCode is not None or + self.DeliveryNotificationDate is not None or + self.DestinationCity is not None or + self.DestinationCountryCode is not None or + self.DestinationState is not None or + self.DestinationZip is not None or + self.EditedLabelID is not None or + self.EmailEnabled is not None or + self.ExpectedDeliveryDate is not None or + self.ExpectedDeliveryTime is not None or + self.GuaranteedDeliveryDate is not None or + self.GuaranteedDeliveryTime is not None or + self.GuaranteedDetails is not None or + self.KahalaIndicator is not None or + self.MailTypeCode is not None or + self.MPDATE is not None or + self.MPSUFFIX is not None or + self.OriginCity is not None or + self.OriginCountryCode is not None or + self.OriginState is not None or + self.OriginZip is not None or + self.PodEnabled is not None or + self.PredictedDeliveryDate is not None or + self.PredictedDeliveryTime is not None or + self.PDWStart is not None or + self.PDWEnd is not None or + self.RelatedRRID is not None or + self.RestoreEnabled is not None or + self.RRAMenabled is not None or + self.RreEnabled is not None or + self.Service is not None or + self.ServiceTypeCode is not None or + self.Status is not None or + self.StatusCategory is not None or + self.StatusSummary is not None or + self.TABLECODE is not None or + self.TpodEnabled is not None or + self.ValueofArticle is not None or + self.EnabledNotificationRequests is not None or + self.TrackSummary is not None or + self.TrackDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackInfoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackInfoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackInfoType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackInfoType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackInfoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackInfoType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackInfoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.AdditionalInfo is not None: + namespaceprefix_ = self.AdditionalInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalInfo>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalInfo), input_name='AdditionalInfo')), namespaceprefix_ , eol_)) + if self.ADPScripting is not None: + namespaceprefix_ = self.ADPScripting_nsprefix_ + ':' if (UseCapturedNS_ and self.ADPScripting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sADPScripting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ADPScripting), input_name='ADPScripting')), namespaceprefix_ , eol_)) + if self.ARCHDATA is not None: + namespaceprefix_ = self.ARCHDATA_nsprefix_ + ':' if (UseCapturedNS_ and self.ARCHDATA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sARCHDATA>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ARCHDATA, input_name='ARCHDATA'), namespaceprefix_ , eol_)) + if self.ArchiveRestoreInfo is not None: + namespaceprefix_ = self.ArchiveRestoreInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.ArchiveRestoreInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sArchiveRestoreInfo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ArchiveRestoreInfo), input_name='ArchiveRestoreInfo')), namespaceprefix_ , eol_)) + if self.AssociatedLabel is not None: + namespaceprefix_ = self.AssociatedLabel_nsprefix_ + ':' if (UseCapturedNS_ and self.AssociatedLabel_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAssociatedLabel>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AssociatedLabel), input_name='AssociatedLabel')), namespaceprefix_ , eol_)) + if self.Class is not None: + namespaceprefix_ = self.Class_nsprefix_ + ':' if (UseCapturedNS_ and self.Class_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClass>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Class), input_name='Class')), namespaceprefix_ , eol_)) + if self.ClassOfMailCode is not None: + namespaceprefix_ = self.ClassOfMailCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ClassOfMailCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClassOfMailCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClassOfMailCode), input_name='ClassOfMailCode')), namespaceprefix_ , eol_)) + if self.DeliveryNotificationDate is not None: + namespaceprefix_ = self.DeliveryNotificationDate_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryNotificationDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryNotificationDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryNotificationDate), input_name='DeliveryNotificationDate')), namespaceprefix_ , eol_)) + if self.DestinationCity is not None: + namespaceprefix_ = self.DestinationCity_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationCity), input_name='DestinationCity')), namespaceprefix_ , eol_)) + if self.DestinationCountryCode is not None: + namespaceprefix_ = self.DestinationCountryCode_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationCountryCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationCountryCode>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.DestinationCountryCode), input_name='DestinationCountryCode')), namespaceprefix_ , eol_)) + if self.DestinationState is not None: + namespaceprefix_ = self.DestinationState_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationState), input_name='DestinationState')), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.EditedLabelID is not None: + namespaceprefix_ = self.EditedLabelID_nsprefix_ + ':' if (UseCapturedNS_ and self.EditedLabelID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEditedLabelID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EditedLabelID), input_name='EditedLabelID')), namespaceprefix_ , eol_)) + if self.EmailEnabled is not None: + namespaceprefix_ = self.EmailEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.EmailEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailEnabled>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailEnabled), input_name='EmailEnabled')), namespaceprefix_ , eol_)) + if self.ExpectedDeliveryDate is not None: + namespaceprefix_ = self.ExpectedDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpectedDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExpectedDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExpectedDeliveryDate), input_name='ExpectedDeliveryDate')), namespaceprefix_ , eol_)) + if self.ExpectedDeliveryTime is not None: + namespaceprefix_ = self.ExpectedDeliveryTime_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpectedDeliveryTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExpectedDeliveryTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExpectedDeliveryTime), input_name='ExpectedDeliveryTime')), namespaceprefix_ , eol_)) + if self.GuaranteedDeliveryDate is not None: + namespaceprefix_ = self.GuaranteedDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteedDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteedDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteedDeliveryDate), input_name='GuaranteedDeliveryDate')), namespaceprefix_ , eol_)) + if self.GuaranteedDeliveryTime is not None: + namespaceprefix_ = self.GuaranteedDeliveryTime_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteedDeliveryTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteedDeliveryTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteedDeliveryTime), input_name='GuaranteedDeliveryTime')), namespaceprefix_ , eol_)) + if self.GuaranteedDetails is not None: + namespaceprefix_ = self.GuaranteedDetails_nsprefix_ + ':' if (UseCapturedNS_ and 
self.GuaranteedDetails_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteedDetails>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteedDetails), input_name='GuaranteedDetails')), namespaceprefix_ , eol_)) + if self.KahalaIndicator is not None: + namespaceprefix_ = self.KahalaIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.KahalaIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sKahalaIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.KahalaIndicator), input_name='KahalaIndicator')), namespaceprefix_ , eol_)) + if self.MailTypeCode is not None: + namespaceprefix_ = self.MailTypeCode_nsprefix_ + ':' if (UseCapturedNS_ and self.MailTypeCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailTypeCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailTypeCode), input_name='MailTypeCode')), namespaceprefix_ , eol_)) + if self.MPDATE is not None: + namespaceprefix_ = self.MPDATE_nsprefix_ + ':' if (UseCapturedNS_ and self.MPDATE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMPDATE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MPDATE), input_name='MPDATE')), namespaceprefix_ , eol_)) + if self.MPSUFFIX is not None: + namespaceprefix_ = self.MPSUFFIX_nsprefix_ + ':' if (UseCapturedNS_ and self.MPSUFFIX_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMPSUFFIX>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MPSUFFIX, input_name='MPSUFFIX'), namespaceprefix_ , eol_)) + if self.OriginCity is not None: + namespaceprefix_ = self.OriginCity_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginCity), input_name='OriginCity')), namespaceprefix_ , eol_)) + if self.OriginCountryCode is not None: + namespaceprefix_ = self.OriginCountryCode_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCountryCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCountryCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginCountryCode), input_name='OriginCountryCode')), namespaceprefix_ , eol_)) + if self.OriginState is not None: + namespaceprefix_ = self.OriginState_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginState), input_name='OriginState')), namespaceprefix_ , eol_)) + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginZip), input_name='OriginZip')), namespaceprefix_ , eol_)) + if self.PodEnabled is not None: + namespaceprefix_ = self.PodEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.PodEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPodEnabled>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.PodEnabled), input_name='PodEnabled')), namespaceprefix_ , eol_)) + if self.PredictedDeliveryDate is not None: + namespaceprefix_ = self.PredictedDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.PredictedDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPredictedDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PredictedDeliveryDate), input_name='PredictedDeliveryDate')), namespaceprefix_ , eol_)) + if self.PredictedDeliveryTime is not None: + namespaceprefix_ = self.PredictedDeliveryTime_nsprefix_ + ':' if (UseCapturedNS_ and self.PredictedDeliveryTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPredictedDeliveryTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PredictedDeliveryTime), input_name='PredictedDeliveryTime')), namespaceprefix_ , eol_)) + if self.PDWStart is not None: + namespaceprefix_ = self.PDWStart_nsprefix_ + ':' if (UseCapturedNS_ and self.PDWStart_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDWStart>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDWStart), input_name='PDWStart')), namespaceprefix_ , eol_)) + if self.PDWEnd is not None: + namespaceprefix_ = self.PDWEnd_nsprefix_ + ':' if (UseCapturedNS_ and self.PDWEnd_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDWEnd>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDWEnd), input_name='PDWEnd')), namespaceprefix_ , eol_)) + if self.RelatedRRID is not None: + namespaceprefix_ = self.RelatedRRID_nsprefix_ + ':' if (UseCapturedNS_ and self.RelatedRRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRelatedRRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RelatedRRID), input_name='RelatedRRID')), namespaceprefix_ , eol_)) + if self.RestoreEnabled is not None: + namespaceprefix_ = self.RestoreEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.RestoreEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestoreEnabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.RestoreEnabled, input_name='RestoreEnabled'), namespaceprefix_ , eol_)) + if self.RRAMenabled is not None: + namespaceprefix_ = self.RRAMenabled_nsprefix_ + ':' if (UseCapturedNS_ and self.RRAMenabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRRAMenabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.RRAMenabled, input_name='RRAMenabled'), namespaceprefix_ , eol_)) + if self.RreEnabled is not None: + namespaceprefix_ = self.RreEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.RreEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRreEnabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.RreEnabled, input_name='RreEnabled'), namespaceprefix_ , eol_)) + if self.Service is not None: + namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Service), input_name='Service')), namespaceprefix_ , eol_)) + if self.ServiceTypeCode is not None: + namespaceprefix_ = self.ServiceTypeCode_nsprefix_ + ':' 
if (UseCapturedNS_ and self.ServiceTypeCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceTypeCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceTypeCode), input_name='ServiceTypeCode')), namespaceprefix_ , eol_)) + if self.Status is not None: + namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatus>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_)) + if self.StatusCategory is not None: + namespaceprefix_ = self.StatusCategory_nsprefix_ + ':' if (UseCapturedNS_ and self.StatusCategory_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatusCategory>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusCategory), input_name='StatusCategory')), namespaceprefix_ , eol_)) + if self.StatusSummary is not None: + namespaceprefix_ = self.StatusSummary_nsprefix_ + ':' if (UseCapturedNS_ and self.StatusSummary_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatusSummary>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusSummary), input_name='StatusSummary')), namespaceprefix_ , eol_)) + if self.TABLECODE is not None: + namespaceprefix_ = self.TABLECODE_nsprefix_ + ':' if (UseCapturedNS_ and self.TABLECODE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTABLECODE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TABLECODE), input_name='TABLECODE')), namespaceprefix_ , eol_)) + if self.TpodEnabled is not None: + namespaceprefix_ = self.TpodEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.TpodEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTpodEnabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.TpodEnabled, input_name='TpodEnabled'), namespaceprefix_ , eol_)) + if self.ValueofArticle is not None: + namespaceprefix_ = self.ValueofArticle_nsprefix_ + ':' if (UseCapturedNS_ and self.ValueofArticle_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValueofArticle>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValueofArticle), input_name='ValueofArticle')), namespaceprefix_ , eol_)) + if self.EnabledNotificationRequests is not None: + namespaceprefix_ = self.EnabledNotificationRequests_nsprefix_ + ':' if (UseCapturedNS_ and self.EnabledNotificationRequests_nsprefix_) else '' + self.EnabledNotificationRequests.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EnabledNotificationRequests', pretty_print=pretty_print) + if self.TrackSummary is not None: + namespaceprefix_ = self.TrackSummary_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackSummary_nsprefix_) else '' + self.TrackSummary.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackSummary', pretty_print=pretty_print) + for TrackDetail_ in self.TrackDetail: + namespaceprefix_ = self.TrackDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackDetail_nsprefix_) else '' + TrackDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + 
self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'AdditionalInfo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalInfo') + value_ = self.gds_validate_string(value_, node, 'AdditionalInfo') + self.AdditionalInfo = value_ + self.AdditionalInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'ADPScripting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ADPScripting') + value_ = self.gds_validate_string(value_, node, 'ADPScripting') + self.ADPScripting = value_ + self.ADPScripting_nsprefix_ = child_.prefix + elif nodeName_ == 'ARCHDATA': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ARCHDATA') + ival_ = self.gds_validate_boolean(ival_, node, 'ARCHDATA') + self.ARCHDATA = ival_ + self.ARCHDATA_nsprefix_ = child_.prefix + elif nodeName_ == 'ArchiveRestoreInfo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ArchiveRestoreInfo') + value_ = self.gds_validate_string(value_, node, 'ArchiveRestoreInfo') + self.ArchiveRestoreInfo = value_ + self.ArchiveRestoreInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'AssociatedLabel': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AssociatedLabel') + value_ = self.gds_validate_string(value_, node, 'AssociatedLabel') + self.AssociatedLabel = value_ + self.AssociatedLabel_nsprefix_ = child_.prefix + elif nodeName_ == 'Class': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Class') + value_ = self.gds_validate_string(value_, node, 'Class') + self.Class = value_ + self.Class_nsprefix_ = child_.prefix + elif nodeName_ == 'ClassOfMailCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClassOfMailCode') + value_ = self.gds_validate_string(value_, node, 'ClassOfMailCode') + self.ClassOfMailCode = value_ + self.ClassOfMailCode_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryNotificationDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryNotificationDate') + value_ = self.gds_validate_string(value_, node, 'DeliveryNotificationDate') + self.DeliveryNotificationDate = value_ + self.DeliveryNotificationDate_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationCity') + value_ = self.gds_validate_string(value_, node, 'DestinationCity') + self.DestinationCity = value_ + self.DestinationCity_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationCountryCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationCountryCode') + value_ = self.gds_validate_string(value_, node, 'DestinationCountryCode') + self.DestinationCountryCode = value_ + self.DestinationCountryCode_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationState') + value_ = 
self.gds_validate_string(value_, node, 'DestinationState') + self.DestinationState = value_ + self.DestinationState_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'EditedLabelID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EditedLabelID') + value_ = self.gds_validate_string(value_, node, 'EditedLabelID') + self.EditedLabelID = value_ + self.EditedLabelID_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailEnabled': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailEnabled') + value_ = self.gds_validate_string(value_, node, 'EmailEnabled') + self.EmailEnabled = value_ + self.EmailEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpectedDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExpectedDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ExpectedDeliveryDate') + self.ExpectedDeliveryDate = value_ + self.ExpectedDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpectedDeliveryTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExpectedDeliveryTime') + value_ = self.gds_validate_string(value_, node, 'ExpectedDeliveryTime') + self.ExpectedDeliveryTime = value_ + self.ExpectedDeliveryTime_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteedDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteedDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'GuaranteedDeliveryDate') + self.GuaranteedDeliveryDate = value_ + self.GuaranteedDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteedDeliveryTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteedDeliveryTime') + value_ = self.gds_validate_string(value_, node, 'GuaranteedDeliveryTime') + self.GuaranteedDeliveryTime = value_ + self.GuaranteedDeliveryTime_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteedDetails': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteedDetails') + value_ = self.gds_validate_string(value_, node, 'GuaranteedDetails') + self.GuaranteedDetails = value_ + self.GuaranteedDetails_nsprefix_ = child_.prefix + elif nodeName_ == 'KahalaIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'KahalaIndicator') + value_ = self.gds_validate_string(value_, node, 'KahalaIndicator') + self.KahalaIndicator = value_ + self.KahalaIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MailTypeCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailTypeCode') + value_ = self.gds_validate_string(value_, node, 'MailTypeCode') + self.MailTypeCode = value_ + self.MailTypeCode_nsprefix_ = child_.prefix + elif nodeName_ == 'MPDATE': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MPDATE') + value_ = self.gds_validate_string(value_, node, 'MPDATE') + self.MPDATE = value_ + self.MPDATE_nsprefix_ = child_.prefix + elif nodeName_ == 'MPSUFFIX' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MPSUFFIX') + ival_ = self.gds_validate_integer(ival_, node, 'MPSUFFIX') + self.MPSUFFIX = ival_ + self.MPSUFFIX_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCity': + value_ = 
child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCity') + value_ = self.gds_validate_string(value_, node, 'OriginCity') + self.OriginCity = value_ + self.OriginCity_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCountryCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCountryCode') + value_ = self.gds_validate_string(value_, node, 'OriginCountryCode') + self.OriginCountryCode = value_ + self.OriginCountryCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginState') + value_ = self.gds_validate_string(value_, node, 'OriginState') + self.OriginState = value_ + self.OriginState_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZip': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginZip') + value_ = self.gds_validate_string(value_, node, 'OriginZip') + self.OriginZip = value_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'PodEnabled': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PodEnabled') + value_ = self.gds_validate_string(value_, node, 'PodEnabled') + self.PodEnabled = value_ + self.PodEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'PredictedDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PredictedDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'PredictedDeliveryDate') + self.PredictedDeliveryDate = value_ + self.PredictedDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'PredictedDeliveryTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PredictedDeliveryTime') + value_ = self.gds_validate_string(value_, node, 'PredictedDeliveryTime') + self.PredictedDeliveryTime = value_ + self.PredictedDeliveryTime_nsprefix_ = child_.prefix + elif nodeName_ == 'PDWStart': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDWStart') + value_ = self.gds_validate_string(value_, node, 'PDWStart') + self.PDWStart = value_ + self.PDWStart_nsprefix_ = child_.prefix + elif nodeName_ == 'PDWEnd': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDWEnd') + value_ = self.gds_validate_string(value_, node, 'PDWEnd') + self.PDWEnd = value_ + self.PDWEnd_nsprefix_ = child_.prefix + elif nodeName_ == 'RelatedRRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RelatedRRID') + value_ = self.gds_validate_string(value_, node, 'RelatedRRID') + self.RelatedRRID = value_ + self.RelatedRRID_nsprefix_ = child_.prefix + elif nodeName_ == 'RestoreEnabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'RestoreEnabled') + ival_ = self.gds_validate_boolean(ival_, node, 'RestoreEnabled') + self.RestoreEnabled = ival_ + self.RestoreEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'RRAMenabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'RRAMenabled') + ival_ = self.gds_validate_boolean(ival_, node, 'RRAMenabled') + self.RRAMenabled = ival_ + self.RRAMenabled_nsprefix_ = child_.prefix + elif nodeName_ == 'RreEnabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'RreEnabled') + ival_ = self.gds_validate_boolean(ival_, node, 'RreEnabled') + self.RreEnabled = ival_ + self.RreEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'Service': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Service') + value_ = self.gds_validate_string(value_, node, 'Service') + 
self.Service = value_ + self.Service_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceTypeCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceTypeCode') + value_ = self.gds_validate_string(value_, node, 'ServiceTypeCode') + self.ServiceTypeCode = value_ + self.ServiceTypeCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Status': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Status') + value_ = self.gds_validate_string(value_, node, 'Status') + self.Status = value_ + self.Status_nsprefix_ = child_.prefix + elif nodeName_ == 'StatusCategory': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'StatusCategory') + value_ = self.gds_validate_string(value_, node, 'StatusCategory') + self.StatusCategory = value_ + self.StatusCategory_nsprefix_ = child_.prefix + elif nodeName_ == 'StatusSummary': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'StatusSummary') + value_ = self.gds_validate_string(value_, node, 'StatusSummary') + self.StatusSummary = value_ + self.StatusSummary_nsprefix_ = child_.prefix + elif nodeName_ == 'TABLECODE': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TABLECODE') + value_ = self.gds_validate_string(value_, node, 'TABLECODE') + self.TABLECODE = value_ + self.TABLECODE_nsprefix_ = child_.prefix + elif nodeName_ == 'TpodEnabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'TpodEnabled') + ival_ = self.gds_validate_boolean(ival_, node, 'TpodEnabled') + self.TpodEnabled = ival_ + self.TpodEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'ValueofArticle': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ValueofArticle') + value_ = self.gds_validate_string(value_, node, 'ValueofArticle') + self.ValueofArticle = value_ + self.ValueofArticle_nsprefix_ = child_.prefix + elif nodeName_ == 'EnabledNotificationRequests': + obj_ = EnabledNotificationRequestsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.EnabledNotificationRequests = obj_ + obj_.original_tagname_ = 'EnabledNotificationRequests' + elif nodeName_ == 'TrackSummary': + obj_ = TrackSummaryType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackSummary = obj_ + obj_.original_tagname_ = 'TrackSummary' + elif nodeName_ == 'TrackDetail': + obj_ = TrackDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackDetail.append(obj_) + obj_.original_tagname_ = 'TrackDetail' +# end class TrackInfoType + + +class EnabledNotificationRequestsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SMS=None, EMAIL=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SMS = SMS + self.SMS_nsprefix_ = None + self.EMAIL = EMAIL + self.EMAIL_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, EnabledNotificationRequestsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EnabledNotificationRequestsType.subclass: + return EnabledNotificationRequestsType.subclass(*args_, **kwargs_) + else: + return EnabledNotificationRequestsType(*args_, **kwargs_) + factory = 
staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SMS(self): + return self.SMS + def set_SMS(self, SMS): + self.SMS = SMS + def get_EMAIL(self): + return self.EMAIL + def set_EMAIL(self, EMAIL): + self.EMAIL = EMAIL + def has__content(self): + if ( + self.SMS is not None or + self.EMAIL is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EnabledNotificationRequestsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('EnabledNotificationRequestsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'EnabledNotificationRequestsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EnabledNotificationRequestsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EnabledNotificationRequestsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EnabledNotificationRequestsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EnabledNotificationRequestsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SMS is not None: + namespaceprefix_ = self.SMS_nsprefix_ + ':' if (UseCapturedNS_ and self.SMS_nsprefix_) else '' + self.SMS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SMS', pretty_print=pretty_print) + if self.EMAIL is not None: + namespaceprefix_ = self.EMAIL_nsprefix_ + ':' if (UseCapturedNS_ and self.EMAIL_nsprefix_) else '' + self.EMAIL.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EMAIL', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SMS': + obj_ = SMSType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SMS = obj_ + obj_.original_tagname_ = 'SMS' + elif nodeName_ == 'EMAIL': + obj_ = EMAILType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.EMAIL = obj_ + obj_.original_tagname_ = 'EMAIL' +# end class EnabledNotificationRequestsType + + +class SMSType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass 
= None + superclass = None + def __init__(self, FD=None, AL=None, TD=None, UP=None, DND=None, FS=None, OA=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FD = FD + self.FD_nsprefix_ = None + self.AL = AL + self.AL_nsprefix_ = None + self.TD = TD + self.TD_nsprefix_ = None + self.UP = UP + self.UP_nsprefix_ = None + self.DND = DND + self.DND_nsprefix_ = None + self.FS = FS + self.FS_nsprefix_ = None + self.OA = OA + self.OA_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SMSType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SMSType.subclass: + return SMSType.subclass(*args_, **kwargs_) + else: + return SMSType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FD(self): + return self.FD + def set_FD(self, FD): + self.FD = FD + def get_AL(self): + return self.AL + def set_AL(self, AL): + self.AL = AL + def get_TD(self): + return self.TD + def set_TD(self, TD): + self.TD = TD + def get_UP(self): + return self.UP + def set_UP(self, UP): + self.UP = UP + def get_DND(self): + return self.DND + def set_DND(self, DND): + self.DND = DND + def get_FS(self): + return self.FS + def set_FS(self, FS): + self.FS = FS + def get_OA(self): + return self.OA + def set_OA(self, OA): + self.OA = OA + def has__content(self): + if ( + self.FD is not None or + self.AL is not None or + self.TD is not None or + self.UP is not None or + self.DND is not None or + self.FS is not None or + self.OA is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SMSType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SMSType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SMSType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SMSType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SMSType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SMSType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SMSType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FD is not None: + namespaceprefix_ = self.FD_nsprefix_ + ':' if (UseCapturedNS_ and self.FD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FD, input_name='FD'), namespaceprefix_ , 
eol_)) + if self.AL is not None: + namespaceprefix_ = self.AL_nsprefix_ + ':' if (UseCapturedNS_ and self.AL_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAL>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AL, input_name='AL'), namespaceprefix_ , eol_)) + if self.TD is not None: + namespaceprefix_ = self.TD_nsprefix_ + ':' if (UseCapturedNS_ and self.TD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.TD, input_name='TD'), namespaceprefix_ , eol_)) + if self.UP is not None: + namespaceprefix_ = self.UP_nsprefix_ + ':' if (UseCapturedNS_ and self.UP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUP>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.UP, input_name='UP'), namespaceprefix_ , eol_)) + if self.DND is not None: + namespaceprefix_ = self.DND_nsprefix_ + ':' if (UseCapturedNS_ and self.DND_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDND>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DND, input_name='DND'), namespaceprefix_ , eol_)) + if self.FS is not None: + namespaceprefix_ = self.FS_nsprefix_ + ':' if (UseCapturedNS_ and self.FS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFS>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FS, input_name='FS'), namespaceprefix_ , eol_)) + if self.OA is not None: + namespaceprefix_ = self.OA_nsprefix_ + ':' if (UseCapturedNS_ and self.OA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOA>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OA, input_name='OA'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FD') + ival_ = self.gds_validate_boolean(ival_, node, 'FD') + self.FD = ival_ + self.FD_nsprefix_ = child_.prefix + elif nodeName_ == 'AL': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AL') + ival_ = self.gds_validate_boolean(ival_, node, 'AL') + self.AL = ival_ + self.AL_nsprefix_ = child_.prefix + elif nodeName_ == 'TD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'TD') + ival_ = self.gds_validate_boolean(ival_, node, 'TD') + self.TD = ival_ + self.TD_nsprefix_ = child_.prefix + elif nodeName_ == 'UP': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'UP') + ival_ = self.gds_validate_boolean(ival_, node, 'UP') + self.UP = ival_ + self.UP_nsprefix_ = child_.prefix + elif nodeName_ == 'DND': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DND') + ival_ = self.gds_validate_boolean(ival_, node, 'DND') + self.DND = ival_ + self.DND_nsprefix_ = child_.prefix + elif nodeName_ == 'FS': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FS') + ival_ = 
self.gds_validate_boolean(ival_, node, 'FS') + self.FS = ival_ + self.FS_nsprefix_ = child_.prefix + elif nodeName_ == 'OA': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OA') + ival_ = self.gds_validate_boolean(ival_, node, 'OA') + self.OA = ival_ + self.OA_nsprefix_ = child_.prefix +# end class SMSType + + +class EMAILType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FD=None, AL=None, TD=None, UP=None, DND=None, FS=None, OA=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FD = FD + self.FD_nsprefix_ = None + self.AL = AL + self.AL_nsprefix_ = None + self.TD = TD + self.TD_nsprefix_ = None + self.UP = UP + self.UP_nsprefix_ = None + self.DND = DND + self.DND_nsprefix_ = None + self.FS = FS + self.FS_nsprefix_ = None + self.OA = OA + self.OA_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, EMAILType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EMAILType.subclass: + return EMAILType.subclass(*args_, **kwargs_) + else: + return EMAILType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FD(self): + return self.FD + def set_FD(self, FD): + self.FD = FD + def get_AL(self): + return self.AL + def set_AL(self, AL): + self.AL = AL + def get_TD(self): + return self.TD + def set_TD(self, TD): + self.TD = TD + def get_UP(self): + return self.UP + def set_UP(self, UP): + self.UP = UP + def get_DND(self): + return self.DND + def set_DND(self, DND): + self.DND = DND + def get_FS(self): + return self.FS + def set_FS(self, FS): + self.FS = FS + def get_OA(self): + return self.OA + def set_OA(self, OA): + self.OA = OA + def has__content(self): + if ( + self.FD is not None or + self.AL is not None or + self.TD is not None or + self.UP is not None or + self.DND is not None or + self.FS is not None or + self.OA is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EMAILType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('EMAILType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'EMAILType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EMAILType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EMAILType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EMAILType'): + pass + def _exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='EMAILType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FD is not None: + namespaceprefix_ = self.FD_nsprefix_ + ':' if (UseCapturedNS_ and self.FD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FD, input_name='FD'), namespaceprefix_ , eol_)) + if self.AL is not None: + namespaceprefix_ = self.AL_nsprefix_ + ':' if (UseCapturedNS_ and self.AL_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAL>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AL, input_name='AL'), namespaceprefix_ , eol_)) + if self.TD is not None: + namespaceprefix_ = self.TD_nsprefix_ + ':' if (UseCapturedNS_ and self.TD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.TD, input_name='TD'), namespaceprefix_ , eol_)) + if self.UP is not None: + namespaceprefix_ = self.UP_nsprefix_ + ':' if (UseCapturedNS_ and self.UP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUP>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.UP, input_name='UP'), namespaceprefix_ , eol_)) + if self.DND is not None: + namespaceprefix_ = self.DND_nsprefix_ + ':' if (UseCapturedNS_ and self.DND_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDND>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DND, input_name='DND'), namespaceprefix_ , eol_)) + if self.FS is not None: + namespaceprefix_ = self.FS_nsprefix_ + ':' if (UseCapturedNS_ and self.FS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFS>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FS, input_name='FS'), namespaceprefix_ , eol_)) + if self.OA is not None: + namespaceprefix_ = self.OA_nsprefix_ + ':' if (UseCapturedNS_ and self.OA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOA>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OA, input_name='OA'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FD') + ival_ = self.gds_validate_boolean(ival_, node, 'FD') + self.FD = ival_ + self.FD_nsprefix_ = child_.prefix + elif nodeName_ == 'AL': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AL') + ival_ = self.gds_validate_boolean(ival_, node, 'AL') + self.AL = ival_ + self.AL_nsprefix_ = child_.prefix + elif nodeName_ == 'TD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'TD') + ival_ = self.gds_validate_boolean(ival_, node, 'TD') + self.TD = ival_ + self.TD_nsprefix_ = child_.prefix + elif nodeName_ == 'UP': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'UP') + ival_ = 
self.gds_validate_boolean(ival_, node, 'UP') + self.UP = ival_ + self.UP_nsprefix_ = child_.prefix + elif nodeName_ == 'DND': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DND') + ival_ = self.gds_validate_boolean(ival_, node, 'DND') + self.DND = ival_ + self.DND_nsprefix_ = child_.prefix + elif nodeName_ == 'FS': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FS') + ival_ = self.gds_validate_boolean(ival_, node, 'FS') + self.FS = ival_ + self.FS_nsprefix_ = child_.prefix + elif nodeName_ == 'OA': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OA') + ival_ = self.gds_validate_boolean(ival_, node, 'OA') + self.OA = ival_ + self.OA_nsprefix_ = child_.prefix +# end class EMAILType + + +class TrackSummaryType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, EventTime=None, EventDate=None, Event=None, EventCity=None, EventState=None, EventZIPCode=None, EventCountry=None, FirmName=None, Name=None, AuthorizedAgent=None, EventCode=None, ActionCode=None, ReasonCode=None, GeoCertified=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.EventTime = EventTime + self.EventTime_nsprefix_ = None + self.EventDate = EventDate + self.EventDate_nsprefix_ = None + self.Event = Event + self.Event_nsprefix_ = None + self.EventCity = EventCity + self.EventCity_nsprefix_ = None + self.EventState = EventState + self.EventState_nsprefix_ = None + self.EventZIPCode = EventZIPCode + self.EventZIPCode_nsprefix_ = None + self.EventCountry = EventCountry + self.EventCountry_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Name = Name + self.Name_nsprefix_ = None + self.AuthorizedAgent = AuthorizedAgent + self.AuthorizedAgent_nsprefix_ = None + self.EventCode = EventCode + self.EventCode_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.ReasonCode = ReasonCode + self.ReasonCode_nsprefix_ = None + self.GeoCertified = GeoCertified + self.GeoCertified_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackSummaryType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackSummaryType.subclass: + return TrackSummaryType.subclass(*args_, **kwargs_) + else: + return TrackSummaryType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EventTime(self): + return self.EventTime + def set_EventTime(self, EventTime): + self.EventTime = EventTime + def get_EventDate(self): + return self.EventDate + def set_EventDate(self, EventDate): + self.EventDate = EventDate + def get_Event(self): + return self.Event + def set_Event(self, Event): + self.Event = Event + def get_EventCity(self): + return self.EventCity + def set_EventCity(self, EventCity): + self.EventCity = EventCity + def get_EventState(self): + return self.EventState + def set_EventState(self, EventState): + self.EventState = EventState + def get_EventZIPCode(self): + return self.EventZIPCode + def set_EventZIPCode(self, EventZIPCode): + self.EventZIPCode = EventZIPCode + def get_EventCountry(self): + return self.EventCountry + def 
set_EventCountry(self, EventCountry): + self.EventCountry = EventCountry + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Name(self): + return self.Name + def set_Name(self, Name): + self.Name = Name + def get_AuthorizedAgent(self): + return self.AuthorizedAgent + def set_AuthorizedAgent(self, AuthorizedAgent): + self.AuthorizedAgent = AuthorizedAgent + def get_EventCode(self): + return self.EventCode + def set_EventCode(self, EventCode): + self.EventCode = EventCode + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_ReasonCode(self): + return self.ReasonCode + def set_ReasonCode(self, ReasonCode): + self.ReasonCode = ReasonCode + def get_GeoCertified(self): + return self.GeoCertified + def set_GeoCertified(self, GeoCertified): + self.GeoCertified = GeoCertified + def has__content(self): + if ( + self.EventTime is not None or + self.EventDate is not None or + self.Event is not None or + self.EventCity is not None or + self.EventState is not None or + self.EventZIPCode is not None or + self.EventCountry is not None or + self.FirmName is not None or + self.Name is not None or + self.AuthorizedAgent is not None or + self.EventCode is not None or + self.ActionCode is not None or + self.ReasonCode is not None or + self.GeoCertified is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackSummaryType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackSummaryType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackSummaryType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackSummaryType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackSummaryType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackSummaryType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackSummaryType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.EventTime is not None: + namespaceprefix_ = self.EventTime_nsprefix_ + ':' if (UseCapturedNS_ and self.EventTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventTime), input_name='EventTime')), namespaceprefix_ , eol_)) + if self.EventDate is not None: + namespaceprefix_ = self.EventDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EventDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventDate>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.EventDate), input_name='EventDate')), namespaceprefix_ , eol_)) + if self.Event is not None: + namespaceprefix_ = self.Event_nsprefix_ + ':' if (UseCapturedNS_ and self.Event_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEvent>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Event), input_name='Event')), namespaceprefix_ , eol_)) + if self.EventCity is not None: + namespaceprefix_ = self.EventCity_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCity), input_name='EventCity')), namespaceprefix_ , eol_)) + if self.EventState is not None: + namespaceprefix_ = self.EventState_nsprefix_ + ':' if (UseCapturedNS_ and self.EventState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventState), input_name='EventState')), namespaceprefix_ , eol_)) + if self.EventZIPCode is not None: + namespaceprefix_ = self.EventZIPCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventZIPCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventZIPCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.EventZIPCode, input_name='EventZIPCode'), namespaceprefix_ , eol_)) + if self.EventCountry is not None: + namespaceprefix_ = self.EventCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCountry), input_name='EventCountry')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.Name is not None: + namespaceprefix_ = self.Name_nsprefix_ + ':' if (UseCapturedNS_ and self.Name_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')), namespaceprefix_ , eol_)) + if self.AuthorizedAgent is not None: + namespaceprefix_ = self.AuthorizedAgent_nsprefix_ + ':' if (UseCapturedNS_ and self.AuthorizedAgent_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAuthorizedAgent>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AuthorizedAgent, input_name='AuthorizedAgent'), namespaceprefix_ , eol_)) + if self.EventCode is not None: + namespaceprefix_ = self.EventCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCode), input_name='EventCode')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.ReasonCode is not None: + namespaceprefix_ = self.ReasonCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReasonCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReasonCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReasonCode), input_name='ReasonCode')), namespaceprefix_ , eol_)) + if self.GeoCertified is not None: + namespaceprefix_ = self.GeoCertified_nsprefix_ + ':' if (UseCapturedNS_ and self.GeoCertified_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGeoCertified>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.GeoCertified, input_name='GeoCertified'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'EventTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventTime') + value_ = self.gds_validate_string(value_, node, 'EventTime') + self.EventTime = value_ + self.EventTime_nsprefix_ = child_.prefix + elif nodeName_ == 'EventDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventDate') + value_ = self.gds_validate_string(value_, node, 'EventDate') + self.EventDate = value_ + self.EventDate_nsprefix_ = child_.prefix + elif nodeName_ == 'Event': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Event') + value_ = self.gds_validate_string(value_, node, 'Event') + self.Event = value_ + self.Event_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCity') + value_ = self.gds_validate_string(value_, node, 'EventCity') + self.EventCity = value_ + self.EventCity_nsprefix_ = child_.prefix + elif nodeName_ == 'EventState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventState') + value_ = self.gds_validate_string(value_, node, 'EventState') + self.EventState = value_ + self.EventState_nsprefix_ = child_.prefix + elif nodeName_ == 'EventZIPCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'EventZIPCode') + ival_ = self.gds_validate_integer(ival_, node, 'EventZIPCode') + self.EventZIPCode = ival_ + self.EventZIPCode_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCountry') + value_ = self.gds_validate_string(value_, node, 'EventCountry') + self.EventCountry = value_ + self.EventCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Name': + value_ = child_.text + 
value_ = self.gds_parse_string(value_, node, 'Name') + value_ = self.gds_validate_string(value_, node, 'Name') + self.Name = value_ + self.Name_nsprefix_ = child_.prefix + elif nodeName_ == 'AuthorizedAgent': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AuthorizedAgent') + ival_ = self.gds_validate_boolean(ival_, node, 'AuthorizedAgent') + self.AuthorizedAgent = ival_ + self.AuthorizedAgent_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCode') + value_ = self.gds_validate_string(value_, node, 'EventCode') + self.EventCode = value_ + self.EventCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ReasonCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReasonCode') + value_ = self.gds_validate_string(value_, node, 'ReasonCode') + self.ReasonCode = value_ + self.ReasonCode_nsprefix_ = child_.prefix + elif nodeName_ == 'GeoCertified': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'GeoCertified') + ival_ = self.gds_validate_boolean(ival_, node, 'GeoCertified') + self.GeoCertified = ival_ + self.GeoCertified_nsprefix_ = child_.prefix +# end class TrackSummaryType + + +class TrackDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, EventTime=None, EventDate=None, Event=None, EventCity=None, EventState=None, EventZIPCode=None, EventCountry=None, FirmName=None, Name=None, AuthorizedAgent=None, GeoCertified=None, EventCode=None, ActionCode=None, ReasonCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.EventTime = EventTime + self.EventTime_nsprefix_ = None + self.EventDate = EventDate + self.EventDate_nsprefix_ = None + self.Event = Event + self.Event_nsprefix_ = None + self.EventCity = EventCity + self.EventCity_nsprefix_ = None + self.EventState = EventState + self.EventState_nsprefix_ = None + self.EventZIPCode = EventZIPCode + self.EventZIPCode_nsprefix_ = None + self.EventCountry = EventCountry + self.EventCountry_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Name = Name + self.Name_nsprefix_ = None + self.AuthorizedAgent = AuthorizedAgent + self.AuthorizedAgent_nsprefix_ = None + self.GeoCertified = GeoCertified + self.GeoCertified_nsprefix_ = None + self.EventCode = EventCode + self.EventCode_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.ReasonCode = ReasonCode + self.ReasonCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackDetailType.subclass: + return TrackDetailType.subclass(*args_, **kwargs_) + else: + return TrackDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EventTime(self): + return self.EventTime + def 
set_EventTime(self, EventTime): + self.EventTime = EventTime + def get_EventDate(self): + return self.EventDate + def set_EventDate(self, EventDate): + self.EventDate = EventDate + def get_Event(self): + return self.Event + def set_Event(self, Event): + self.Event = Event + def get_EventCity(self): + return self.EventCity + def set_EventCity(self, EventCity): + self.EventCity = EventCity + def get_EventState(self): + return self.EventState + def set_EventState(self, EventState): + self.EventState = EventState + def get_EventZIPCode(self): + return self.EventZIPCode + def set_EventZIPCode(self, EventZIPCode): + self.EventZIPCode = EventZIPCode + def get_EventCountry(self): + return self.EventCountry + def set_EventCountry(self, EventCountry): + self.EventCountry = EventCountry + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Name(self): + return self.Name + def set_Name(self, Name): + self.Name = Name + def get_AuthorizedAgent(self): + return self.AuthorizedAgent + def set_AuthorizedAgent(self, AuthorizedAgent): + self.AuthorizedAgent = AuthorizedAgent + def get_GeoCertified(self): + return self.GeoCertified + def set_GeoCertified(self, GeoCertified): + self.GeoCertified = GeoCertified + def get_EventCode(self): + return self.EventCode + def set_EventCode(self, EventCode): + self.EventCode = EventCode + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_ReasonCode(self): + return self.ReasonCode + def set_ReasonCode(self, ReasonCode): + self.ReasonCode = ReasonCode + def has__content(self): + if ( + self.EventTime is not None or + self.EventDate is not None or + self.Event is not None or + self.EventCity is not None or + self.EventState is not None or + self.EventZIPCode is not None or + self.EventCountry is not None or + self.FirmName is not None or + self.Name is not None or + self.AuthorizedAgent is not None or + self.GeoCertified is not None or + self.EventCode is not None or + self.ActionCode is not None or + self.ReasonCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackDetailType', fromsubclass_=False, pretty_print=True): + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.EventTime is not None: + namespaceprefix_ = self.EventTime_nsprefix_ + ':' if (UseCapturedNS_ and self.EventTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventTime), input_name='EventTime')), namespaceprefix_ , eol_)) + if self.EventDate is not None: + namespaceprefix_ = self.EventDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EventDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventDate), input_name='EventDate')), namespaceprefix_ , eol_)) + if self.Event is not None: + namespaceprefix_ = self.Event_nsprefix_ + ':' if (UseCapturedNS_ and self.Event_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEvent>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Event), input_name='Event')), namespaceprefix_ , eol_)) + if self.EventCity is not None: + namespaceprefix_ = self.EventCity_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCity), input_name='EventCity')), namespaceprefix_ , eol_)) + if self.EventState is not None: + namespaceprefix_ = self.EventState_nsprefix_ + ':' if (UseCapturedNS_ and self.EventState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventState), input_name='EventState')), namespaceprefix_ , eol_)) + if self.EventZIPCode is not None: + namespaceprefix_ = self.EventZIPCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventZIPCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventZIPCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.EventZIPCode, input_name='EventZIPCode'), namespaceprefix_ , eol_)) + if self.EventCountry is not None: + namespaceprefix_ = self.EventCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCountry), input_name='EventCountry')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.Name is not None: + namespaceprefix_ = self.Name_nsprefix_ + ':' if (UseCapturedNS_ and self.Name_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')), namespaceprefix_ , eol_)) + if self.AuthorizedAgent is not None: + namespaceprefix_ = self.AuthorizedAgent_nsprefix_ + ':' if (UseCapturedNS_ and self.AuthorizedAgent_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAuthorizedAgent>%s%s' % (namespaceprefix_ , 
self.gds_format_boolean(self.AuthorizedAgent, input_name='AuthorizedAgent'), namespaceprefix_ , eol_)) + if self.GeoCertified is not None: + namespaceprefix_ = self.GeoCertified_nsprefix_ + ':' if (UseCapturedNS_ and self.GeoCertified_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGeoCertified>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.GeoCertified, input_name='GeoCertified'), namespaceprefix_ , eol_)) + if self.EventCode is not None: + namespaceprefix_ = self.EventCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCode), input_name='EventCode')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.ReasonCode is not None: + namespaceprefix_ = self.ReasonCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReasonCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReasonCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReasonCode), input_name='ReasonCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'EventTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventTime') + value_ = self.gds_validate_string(value_, node, 'EventTime') + self.EventTime = value_ + self.EventTime_nsprefix_ = child_.prefix + elif nodeName_ == 'EventDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventDate') + value_ = self.gds_validate_string(value_, node, 'EventDate') + self.EventDate = value_ + self.EventDate_nsprefix_ = child_.prefix + elif nodeName_ == 'Event': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Event') + value_ = self.gds_validate_string(value_, node, 'Event') + self.Event = value_ + self.Event_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCity') + value_ = self.gds_validate_string(value_, node, 'EventCity') + self.EventCity = value_ + self.EventCity_nsprefix_ = child_.prefix + elif nodeName_ == 'EventState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventState') + value_ = self.gds_validate_string(value_, node, 'EventState') + self.EventState = value_ + self.EventState_nsprefix_ = child_.prefix + elif nodeName_ == 'EventZIPCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'EventZIPCode') + ival_ = self.gds_validate_integer(ival_, 
node, 'EventZIPCode') + self.EventZIPCode = ival_ + self.EventZIPCode_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCountry') + value_ = self.gds_validate_string(value_, node, 'EventCountry') + self.EventCountry = value_ + self.EventCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Name': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Name') + value_ = self.gds_validate_string(value_, node, 'Name') + self.Name = value_ + self.Name_nsprefix_ = child_.prefix + elif nodeName_ == 'AuthorizedAgent': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AuthorizedAgent') + ival_ = self.gds_validate_boolean(ival_, node, 'AuthorizedAgent') + self.AuthorizedAgent = ival_ + self.AuthorizedAgent_nsprefix_ = child_.prefix + elif nodeName_ == 'GeoCertified': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'GeoCertified') + ival_ = self.gds_validate_boolean(ival_, node, 'GeoCertified') + self.GeoCertified = ival_ + self.GeoCertified_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCode') + value_ = self.gds_validate_string(value_, node, 'EventCode') + self.EventCode = value_ + self.EventCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ReasonCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReasonCode') + value_ = self.gds_validate_string(value_, node, 'ReasonCode') + self.ReasonCode = value_ + self.ReasonCode_nsprefix_ = child_.prefix +# end class TrackDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from track_response import *\n\n') + sys.stdout.write('import track_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "EMAILType", + "EnabledNotificationRequestsType", + "SMSType", + "TrackDetailType", + "TrackInfoType", + "TrackResponse", + "TrackSummaryType" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_request.py b/modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_request.py new file mode 100644 index 0000000000..907206dd5b --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_request.py @@ -0,0 +1,1542 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:47 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/zip_code_lookup_request.py') +# +# Command line arguments: +# ./schemas/ZipCodeLookupRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/zip_code_lookup_request.py" ./schemas/ZipCodeLookupRequest.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ZipCodeLookupRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeLookupRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeLookupRequest.subclass: + return ZipCodeLookupRequest.subclass(*args_, **kwargs_) + else: + return ZipCodeLookupRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Address is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeLookupRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeLookupRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeLookupRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeLookupRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, 
eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ZipCodeLookupRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + self.Address.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address = obj_ + obj_.original_tagname_ = 'Address' +# end class ZipCodeLookupRequest + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Address1=None, Address2=None, FirmName=None, City=None, State=None, Zip5=None, Zip4=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + 
def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_Zip4(self): + return self.Zip4 + def set_Zip4(self, Zip4): + self.Zip4 = Zip4 + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Address1 is not None or + self.Address2 is not None or + self.FirmName is not None or + self.City is not None or + self.State is not None or + self.Zip5 is not None or + self.Zip4 is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Address1 is not None: + namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_)) + if self.Zip4 is not None: + namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip4), input_name='Zip4')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address1') + value_ = self.gds_validate_string(value_, node, 'Address1') + self.Address1 = value_ + self.Address1_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, 
node, 'Zip5') + ival_ = self.gds_validate_integer(ival_, node, 'Zip5') + self.Zip5 = ival_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip4') + value_ = self.gds_validate_string(value_, node, 'Zip4') + self.Zip4 = value_ + self.Zip4_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from zip_code_lookup_request import *\n\n') + sys.stdout.write('import zip_code_lookup_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "ZipCodeLookupRequest" +] diff --git a/modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_response.py b/modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_response.py new file mode 100644 index 0000000000..14e4b78eff --- /dev/null +++ b/modules/connectors/usps/karrio/schemas/usps/zip_code_lookup_response.py @@ -0,0 +1,1544 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:08:47 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps/zip_code_lookup_response.py') +# +# Command line arguments: +# ./schemas/ZipCodeLookupResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps/zip_code_lookup_response.py" ./schemas/ZipCodeLookupResponse.xsd +# +# Current working directory (os.getcwd()): +# usps +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ZipCodeLookupResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if Address is None: + self.Address = [] + else: + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeLookupResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeLookupResponse.subclass: + return ZipCodeLookupResponse.subclass(*args_, **kwargs_) + else: + return ZipCodeLookupResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def add_Address(self, value): + self.Address.append(value) + def insert_Address_at(self, index, value): + self.Address.insert(index, value) + def replace_Address_at(self, index, value): + self.Address[index] = value + def has__content(self): + if ( + self.Address + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeLookupResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeLookupResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeLookupResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeLookupResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='ZipCodeLookupResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Address_ in self.Address: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + Address_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address.append(obj_) + obj_.original_tagname_ = 'Address' +# end class ZipCodeLookupResponse + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Address1=None, Address2=None, City=None, FirmName=None, State=None, Urbanization=None, Zip5=None, Zip4=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + 
self.Zip5 = Zip5
+    def get_Zip4(self):
+        return self.Zip4
+    def set_Zip4(self, Zip4):
+        self.Zip4 = Zip4
+    def get_ID(self):
+        return self.ID
+    def set_ID(self, ID):
+        self.ID = ID
+    def has__content(self):
+        if (
+            self.Address1 is not None or
+            self.Address2 is not None or
+            self.City is not None or
+            self.FirmName is not None or
+            self.State is not None or
+            self.Urbanization is not None or
+            self.Zip5 is not None or
+            self.Zip4 is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'AddressType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'):
+        if self.ID is not None and 'ID' not in already_processed:
+            already_processed.add('ID')
+            outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID'))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Address1 is not None:
+            namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAddress1>%s</%sAddress1>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_))
+        if self.Address2 is not None:
+            namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAddress2>%s</%sAddress2>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_))
+        if self.City is not None:
+            namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCity>%s</%sCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_))
+        if self.FirmName is not None:
+            namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFirmName>%s</%sFirmName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_))
+        if self.State is not None:
+            namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sState>%s</%sState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_))
+        if self.Urbanization is not None:
+            namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sUrbanization>%s</%sUrbanization>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_))
+        if self.Zip5 is not None:
+            namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sZip5>%s</%sZip5>%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_))
+        if self.Zip4 is not None:
+            namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sZip4>%s</%sZip4>%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip4, input_name='Zip4'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        value = find_attr_value_('ID', node)
+        if value is not None and 'ID' not in already_processed:
+            already_processed.add('ID')
+            self.ID = self.gds_parse_integer(value, node, 'ID')
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'Address1':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Address1')
+            value_ = self.gds_validate_string(value_, node, 'Address1')
+            self.Address1 = value_
+            self.Address1_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Address2':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Address2')
+            value_ = self.gds_validate_string(value_, node, 'Address2')
+            self.Address2 = value_
+            self.Address2_nsprefix_ = child_.prefix
+        elif nodeName_ == 'City':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'City')
+            value_ = self.gds_validate_string(value_, node, 'City')
+            self.City = value_
+            self.City_nsprefix_ = child_.prefix
+        elif nodeName_ == 'FirmName':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'FirmName')
+            value_ = self.gds_validate_string(value_, node, 'FirmName')
+            self.FirmName = value_
+            self.FirmName_nsprefix_ = child_.prefix
+        elif nodeName_ == 'State':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'State')
+            value_ = self.gds_validate_string(value_, node, 'State')
+            self.State = value_
+            self.State_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Urbanization':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Urbanization')
+            value_ = self.gds_validate_string(value_, node, 'Urbanization')
+            self.Urbanization = value_
+            self.Urbanization_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Zip5' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node,
'Zip5') + ival_ = self.gds_validate_integer(ival_, node, 'Zip5') + self.Zip5 = ival_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zip4') + ival_ = self.gds_validate_integer(ival_, node, 'Zip4') + self.Zip4 = ival_ + self.Zip4_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from zip_code_lookup_response import *\n\n') + sys.stdout.write('import zip_code_lookup_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
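+# Usage sketch (illustrative only; the XML payload below is a hypothetical
+# example built from the fields this module defines, not an official USPS
+# sample): parseString() turns a raw ZipCodeLookupResponse payload into
+# typed objects, falling back to ZipCodeLookupResponse when the root tag is
+# not registered in GDSClassesMapping.
+#
+#     xml_payload = (
+#         '<ZipCodeLookupResponse>'
+#         '<Address ID="0">'
+#         '<Address2>1098 N FRASER ST</Address2>'
+#         '<City>GEORGETOWN</City>'
+#         '<State>SC</State>'
+#         '<Zip5>29440</Zip5>'
+#         '</Address>'
+#         '</ZipCodeLookupResponse>'
+#     )
+#     response = parseString(xml_payload, silence=True)
+#     for address in response.get_Address():
+#         print(address.get_City(), address.get_State(), address.get_Zip5())
+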
+NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "ZipCodeLookupResponse" +] diff --git a/modules/connectors/usps/schemas/AddressValidateRequest.xsd b/modules/connectors/usps/schemas/AddressValidateRequest.xsd new file mode 100644 index 0000000000..83a1ff85d6 --- /dev/null +++ b/modules/connectors/usps/schemas/AddressValidateRequest.xsd @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/AddressValidateResponse.xsd b/modules/connectors/usps/schemas/AddressValidateResponse.xsd new file mode 100644 index 0000000000..cf6a471ecc --- /dev/null +++ b/modules/connectors/usps/schemas/AddressValidateResponse.xsd @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupAvailabilityRequest.xsd b/modules/connectors/usps/schemas/CarrierPickupAvailabilityRequest.xsd new file mode 100644 index 0000000000..31e9a74351 --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupAvailabilityRequest.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupAvailabilityResponse.xsd b/modules/connectors/usps/schemas/CarrierPickupAvailabilityResponse.xsd new file mode 100644 index 0000000000..248c141b92 --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupAvailabilityResponse.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupCancelRequest.xsd b/modules/connectors/usps/schemas/CarrierPickupCancelRequest.xsd new file mode 100644 index 0000000000..8b20e36d6d --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupCancelRequest.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupCancelResponse.xsd b/modules/connectors/usps/schemas/CarrierPickupCancelResponse.xsd new file mode 100644 index 0000000000..3200cb85ef --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupCancelResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupChangeRequest.xsd b/modules/connectors/usps/schemas/CarrierPickupChangeRequest.xsd new file mode 100644 index 0000000000..f744949cb1 --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupChangeRequest.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupChangeResponse.xsd b/modules/connectors/usps/schemas/CarrierPickupChangeResponse.xsd new file mode 100644 index 0000000000..e95b729ece --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupChangeResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupInquiryRequest.xsd b/modules/connectors/usps/schemas/CarrierPickupInquiryRequest.xsd new file mode 100644 index 0000000000..e417cb1421 --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupInquiryRequest.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupInquiryResponse.xsd 
b/modules/connectors/usps/schemas/CarrierPickupInquiryResponse.xsd new file mode 100644 index 0000000000..ddd5162dcb --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupInquiryResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupScheduleRequest.xsd b/modules/connectors/usps/schemas/CarrierPickupScheduleRequest.xsd new file mode 100644 index 0000000000..ecf00cdd76 --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupScheduleRequest.xsd @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CarrierPickupScheduleResponse.xsd b/modules/connectors/usps/schemas/CarrierPickupScheduleResponse.xsd new file mode 100644 index 0000000000..bffdac0d99 --- /dev/null +++ b/modules/connectors/usps/schemas/CarrierPickupScheduleResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CityStateLookupRequest.xsd b/modules/connectors/usps/schemas/CityStateLookupRequest.xsd new file mode 100644 index 0000000000..2e2a2ae9bc --- /dev/null +++ b/modules/connectors/usps/schemas/CityStateLookupRequest.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/CityStateLookupResponse.xsd b/modules/connectors/usps/schemas/CityStateLookupResponse.xsd new file mode 100644 index 0000000000..b2937464a0 --- /dev/null +++ b/modules/connectors/usps/schemas/CityStateLookupResponse.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/EMRSV4.0BulkRequest.xsd b/modules/connectors/usps/schemas/EMRSV4.0BulkRequest.xsd new file mode 100644 index 0000000000..31669dd41e --- /dev/null +++ b/modules/connectors/usps/schemas/EMRSV4.0BulkRequest.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/Error.xsd b/modules/connectors/usps/schemas/Error.xsd new file mode 100644 index 0000000000..447ae72804 --- /dev/null +++ b/modules/connectors/usps/schemas/Error.xsd @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/ExpressMailCommitmentRequest.xsd b/modules/connectors/usps/schemas/ExpressMailCommitmentRequest.xsd new file mode 100644 index 0000000000..7a166df396 --- /dev/null +++ b/modules/connectors/usps/schemas/ExpressMailCommitmentRequest.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/ExpressMailCommitmentResponse.xsd b/modules/connectors/usps/schemas/ExpressMailCommitmentResponse.xsd new file mode 100644 index 0000000000..24d8ebbda3 --- /dev/null +++ b/modules/connectors/usps/schemas/ExpressMailCommitmentResponse.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/FirstClassMailRequest.xsd b/modules/connectors/usps/schemas/FirstClassMailRequest.xsd new file mode 100644 index 0000000000..610b32a3a6 --- /dev/null +++ b/modules/connectors/usps/schemas/FirstClassMailRequest.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + \ No newline at end of 
file diff --git a/modules/connectors/usps/schemas/FirstClassMailResponse.xsd b/modules/connectors/usps/schemas/FirstClassMailResponse.xsd new file mode 100644 index 0000000000..79bc0cbc0b --- /dev/null +++ b/modules/connectors/usps/schemas/FirstClassMailResponse.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/HFPFacilityInfoRequest.xsd b/modules/connectors/usps/schemas/HFPFacilityInfoRequest.xsd new file mode 100644 index 0000000000..70cfec3227 --- /dev/null +++ b/modules/connectors/usps/schemas/HFPFacilityInfoRequest.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/HFPFacilityInfoResponse.xsd b/modules/connectors/usps/schemas/HFPFacilityInfoResponse.xsd new file mode 100644 index 0000000000..45f171ca60 --- /dev/null +++ b/modules/connectors/usps/schemas/HFPFacilityInfoResponse.xsd @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/IntlRateV2Request.xsd b/modules/connectors/usps/schemas/IntlRateV2Request.xsd new file mode 100644 index 0000000000..e5414ec1ee --- /dev/null +++ b/modules/connectors/usps/schemas/IntlRateV2Request.xsd @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/IntlRateV2Response.xsd b/modules/connectors/usps/schemas/IntlRateV2Response.xsd new file mode 100644 index 0000000000..4b43fbc858 --- /dev/null +++ b/modules/connectors/usps/schemas/IntlRateV2Response.xsd @@ -0,0 +1,104 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/MRSV4.0Request.xsd b/modules/connectors/usps/schemas/MRSV4.0Request.xsd new file mode 100644 index 0000000000..961ee9b08b --- /dev/null +++ b/modules/connectors/usps/schemas/MRSV4.0Request.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSEmailRequest.xsd b/modules/connectors/usps/schemas/PTSEmailRequest.xsd new file mode 100644 index 0000000000..bf47c328c4 --- /dev/null +++ b/modules/connectors/usps/schemas/PTSEmailRequest.xsd @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSEmailResult.xsd b/modules/connectors/usps/schemas/PTSEmailResult.xsd new file mode 100644 index 0000000000..bc7f37333b --- /dev/null +++ b/modules/connectors/usps/schemas/PTSEmailResult.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSPODRequest.xsd b/modules/connectors/usps/schemas/PTSPODRequest.xsd new file mode 100644 index 0000000000..3930a440e1 --- /dev/null +++ b/modules/connectors/usps/schemas/PTSPODRequest.xsd @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSPODResult.xsd b/modules/connectors/usps/schemas/PTSPODResult.xsd new file mode 100644 index 0000000000..cad85f5c0a --- /dev/null +++ b/modules/connectors/usps/schemas/PTSPODResult.xsd @@ -0,0 +1,11 
@@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSRRERequest.xsd b/modules/connectors/usps/schemas/PTSRRERequest.xsd new file mode 100644 index 0000000000..902c799900 --- /dev/null +++ b/modules/connectors/usps/schemas/PTSRRERequest.xsd @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSRREResult.xsd b/modules/connectors/usps/schemas/PTSRREResult.xsd new file mode 100644 index 0000000000..8e6a920c77 --- /dev/null +++ b/modules/connectors/usps/schemas/PTSRREResult.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSTPODResult.xsd b/modules/connectors/usps/schemas/PTSTPODResult.xsd new file mode 100644 index 0000000000..a7e1f911e4 --- /dev/null +++ b/modules/connectors/usps/schemas/PTSTPODResult.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PTSTPodRequest.xsd b/modules/connectors/usps/schemas/PTSTPodRequest.xsd new file mode 100644 index 0000000000..6be82855e6 --- /dev/null +++ b/modules/connectors/usps/schemas/PTSTPodRequest.xsd @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PriorityMailRequest.xsd b/modules/connectors/usps/schemas/PriorityMailRequest.xsd new file mode 100644 index 0000000000..80d0969b04 --- /dev/null +++ b/modules/connectors/usps/schemas/PriorityMailRequest.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/PriorityMailResponse.xsd b/modules/connectors/usps/schemas/PriorityMailResponse.xsd new file mode 100644 index 0000000000..0efd8d5dee --- /dev/null +++ b/modules/connectors/usps/schemas/PriorityMailResponse.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/RateV4Request.xsd b/modules/connectors/usps/schemas/RateV4Request.xsd new file mode 100644 index 0000000000..fb428e27d3 --- /dev/null +++ b/modules/connectors/usps/schemas/RateV4Request.xsd @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/RateV4Response.xsd b/modules/connectors/usps/schemas/RateV4Response.xsd new file mode 100644 index 0000000000..2a98ba9448 --- /dev/null +++ b/modules/connectors/usps/schemas/RateV4Response.xsd @@ -0,0 +1,85 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/SCANRequest.xsd b/modules/connectors/usps/schemas/SCANRequest.xsd new file mode 100644 index 0000000000..33e49c0652 --- /dev/null +++ b/modules/connectors/usps/schemas/SCANRequest.xsd @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/SCANResponse.xsd b/modules/connectors/usps/schemas/SCANResponse.xsd new file mode 100644 index 0000000000..f20e2ab8f9 --- /dev/null +++ b/modules/connectors/usps/schemas/SCANResponse.xsd @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + 65255 skipped + + + + + + + + + + + + + + \ No newline at end of 
file diff --git a/modules/connectors/usps/schemas/SDCGetLocationsRequest.xsd b/modules/connectors/usps/schemas/SDCGetLocationsRequest.xsd new file mode 100644 index 0000000000..f14b908755 --- /dev/null +++ b/modules/connectors/usps/schemas/SDCGetLocationsRequest.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/SDCGetLocationsResponse.xsd b/modules/connectors/usps/schemas/SDCGetLocationsResponse.xsd new file mode 100644 index 0000000000..2d6234bb0e --- /dev/null +++ b/modules/connectors/usps/schemas/SDCGetLocationsResponse.xsd @@ -0,0 +1,124 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/StandardBRequest.xsd b/modules/connectors/usps/schemas/StandardBRequest.xsd new file mode 100644 index 0000000000..dc5879b3d5 --- /dev/null +++ b/modules/connectors/usps/schemas/StandardBRequest.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/StandardBResponse.xsd b/modules/connectors/usps/schemas/StandardBResponse.xsd new file mode 100644 index 0000000000..9671fcfd48 --- /dev/null +++ b/modules/connectors/usps/schemas/StandardBResponse.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/TrackFieldRequest.xsd b/modules/connectors/usps/schemas/TrackFieldRequest.xsd new file mode 100644 index 0000000000..ad0236d0a1 --- /dev/null +++ b/modules/connectors/usps/schemas/TrackFieldRequest.xsd @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/TrackRequest.xsd b/modules/connectors/usps/schemas/TrackRequest.xsd new file mode 100644 index 0000000000..55350d7e45 --- /dev/null +++ b/modules/connectors/usps/schemas/TrackRequest.xsd @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/TrackResponse.xsd b/modules/connectors/usps/schemas/TrackResponse.xsd new file mode 100644 index 0000000000..11020f5e11 --- /dev/null +++ b/modules/connectors/usps/schemas/TrackResponse.xsd @@ -0,0 +1,132 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/ZipCodeLookupRequest.xsd b/modules/connectors/usps/schemas/ZipCodeLookupRequest.xsd new file mode 100644 index 0000000000..59f44652c0 --- /dev/null +++ b/modules/connectors/usps/schemas/ZipCodeLookupRequest.xsd @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/ZipCodeLookupResponse.xsd b/modules/connectors/usps/schemas/ZipCodeLookupResponse.xsd new file mode 100644 index 0000000000..186c913b54 --- /dev/null +++ b/modules/connectors/usps/schemas/ZipCodeLookupResponse.xsd @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSCancelRequest.xsd 
b/modules/connectors/usps/schemas/eVSCancelRequest.xsd new file mode 100644 index 0000000000..ccb9c0c359 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSCancelRequest.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSCancelResponse.xsd b/modules/connectors/usps/schemas/eVSCancelResponse.xsd new file mode 100644 index 0000000000..949de222bf --- /dev/null +++ b/modules/connectors/usps/schemas/eVSCancelResponse.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSExpressMailIntlRequest.xsd b/modules/connectors/usps/schemas/eVSExpressMailIntlRequest.xsd new file mode 100644 index 0000000000..d685372ca2 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSExpressMailIntlRequest.xsd @@ -0,0 +1,131 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSExpressMailIntlResponse.xsd b/modules/connectors/usps/schemas/eVSExpressMailIntlResponse.xsd new file mode 100644 index 0000000000..2487d871e5 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSExpressMailIntlResponse.xsd @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSFirstClassMailIntlRequest.xsd b/modules/connectors/usps/schemas/eVSFirstClassMailIntlRequest.xsd new file mode 100644 index 0000000000..37caf8704a --- /dev/null +++ b/modules/connectors/usps/schemas/eVSFirstClassMailIntlRequest.xsd @@ -0,0 +1,121 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSFirstClassMailIntlResponse.xsd b/modules/connectors/usps/schemas/eVSFirstClassMailIntlResponse.xsd new file mode 100644 index 0000000000..3da7c9b9da --- /dev/null +++ b/modules/connectors/usps/schemas/eVSFirstClassMailIntlResponse.xsd @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSGXGGetLabelRequest.xsd b/modules/connectors/usps/schemas/eVSGXGGetLabelRequest.xsd new file mode 100644 index 0000000000..61dc1cc993 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSGXGGetLabelRequest.xsd @@ -0,0 +1,132 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSGXGGetLabelResponse.xsd b/modules/connectors/usps/schemas/eVSGXGGetLabelResponse.xsd new file mode 100644 index 0000000000..083f648324 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSGXGGetLabelResponse.xsd @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSICancelRequest.xsd b/modules/connectors/usps/schemas/eVSICancelRequest.xsd new file mode 100644 
index 0000000000..d1d6039c0e --- /dev/null +++ b/modules/connectors/usps/schemas/eVSICancelRequest.xsd @@ -0,0 +1,12 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSICancelResponse.xsd b/modules/connectors/usps/schemas/eVSICancelResponse.xsd new file mode 100644 index 0000000000..24d139a32f --- /dev/null +++ b/modules/connectors/usps/schemas/eVSICancelResponse.xsd @@ -0,0 +1,12 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSPriorityMailIntlRequest.xsd b/modules/connectors/usps/schemas/eVSPriorityMailIntlRequest.xsd new file mode 100644 index 0000000000..ae807f0a37 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSPriorityMailIntlRequest.xsd @@ -0,0 +1,137 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/modules/connectors/usps/schemas/eVSPriorityMailIntlResponse.xsd b/modules/connectors/usps/schemas/eVSPriorityMailIntlResponse.xsd new file mode 100644 index 0000000000..faca50a436 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSPriorityMailIntlResponse.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/schemas/eVSRequest.xsd b/modules/connectors/usps/schemas/eVSRequest.xsd new file mode 100644 index 0000000000..2cdd3ff7f9 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSRequest.xsd @@ -0,0 +1,177 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/modules/connectors/usps/schemas/eVSResponse.xsd b/modules/connectors/usps/schemas/eVSResponse.xsd new file mode 100644 index 0000000000..3420904cf6 --- /dev/null +++ b/modules/connectors/usps/schemas/eVSResponse.xsd @@ -0,0 +1,53 @@ + + + + + + + + over 115000 suppressed + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps/setup.py b/modules/connectors/usps/setup.py index 0509b216d8..99a3cad715 100644 --- a/modules/connectors/usps/setup.py +++ b/modules/connectors/usps/setup.py @@ -1,5 +1,3 @@ -"""Warning: This setup.py is only there for git install until poetry support git subdirectory""" - from setuptools import setup, find_namespace_packages with open("README.md", "r") as fh: @@ -7,8 +5,8 @@ setup( name="karrio.usps", - version="2024.6-rc22", - description="Karrio - USPS Shipping Extension", + version="2024.6-rc9", + description="Karrio - USPS Shipping extension", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/karrioapi/karrio", diff --git a/modules/connectors/usps/tests/__init__.py b/modules/connectors/usps/tests/__init__.py index 7095c1fc1e..b6656feeac 100644 --- a/modules/connectors/usps/tests/__init__.py +++ b/modules/connectors/usps/tests/__init__.py @@ -1,6 +1,3 @@ - -from tests.usps.test_rate import * -from tests.usps.test_pickup import * from tests.usps.test_tracking import * +from 
tests.usps.test_rate import * from tests.usps.test_shipment import * -from tests.usps.test_manifest import * diff --git a/modules/connectors/usps/tests/usps/fixture.py b/modules/connectors/usps/tests/usps/fixture.py index 422e66cd33..f0f651e633 100644 --- a/modules/connectors/usps/tests/usps/fixture.py +++ b/modules/connectors/usps/tests/usps/fixture.py @@ -1,33 +1,5 @@ import karrio -import datetime -import karrio.lib as lib - -expiry = datetime.datetime.now() + datetime.timedelta(days=1) -client_id = "client_id" -client_secret = "client_secret" -cached_auth = { - f"usps|{client_id}|{client_secret}": dict( - token_type="Bearer", - issued_at="1685542319575", - client_id=client_id, - access_token="access_token", - scope="addresses international-prices subscriptions payments pickup tracking labels scan-forms companies service-delivery-standards locations international-labels prices", - expires_in="14399", - refresh_count="0", - status="approved", - expiry=expiry.strftime("%Y-%m-%d %H:%M:%S"), - issuer="api.usps.com", - application_name="Silver Shipper Developer", - api_products="[Shipping-Silver]", - public_key="LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4QWxwZjNSNEE1S0lwZnhJVWk1bgpMTFByZjZVZTV3MktzeGxSVzE1UWV0UzBjWGVxaW9OT2hXbDNaaVhEWEdKT3ZuK3RoY0NWVVQ3WC9JZWYvTENZCkhUWk1kYUJOdW55VHEwT2RNZmVkUU8zYUNKZmwvUnJPTHYyaG9TRDR4U1YxRzFuTTc1RTlRYitFZ1p0cmFEUXoKNW42SXRpMUMzOHFGMjU5NVRHUWVUemx3Wk1LQng1VTY2bGwzNzlkZ2plTUJxS3ppVHZHWEpOdVg5ZzRrRlBIaApTLzNERm9FNkVFSW8zUHExeDlXTnRaSm93VkRwQUVZZTQ3SU1UdXJDN2NGcXp2d3M1b1BDRHQ4c083N2lUdDN0Cm1vK3NrM2ExWnZSaGs2WUQ3Zkt1UldQVzFEYUM4dC9pazlnWnhqQndYNlZsSUhDRzRZSHlYejZteWdGV09jMmEKOVFJREFRQUIKLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0t", - ) -} gateway = karrio.gateway["usps"].create( - dict( - client_id="client_id", - client_secret="client_secret", - account_number="Your Account Number", - ), - cache=lib.Cache(**cached_auth), + {"username": "username", "password": "password", "mailer_id": "847654321"} ) diff --git a/modules/connectors/usps/tests/usps/test_rate.py b/modules/connectors/usps/tests/usps/test_rate.py index 1136d0c690..3fbf2abf11 100644 --- a/modules/connectors/usps/tests/usps/test_rate.py +++ b/modules/connectors/usps/tests/usps/test_rate.py @@ -1,168 +1,234 @@ import unittest -from unittest.mock import patch, ANY +import urllib.parse +from unittest.mock import patch +from karrio.core.utils import DP +from karrio.core.models import RateRequest +from karrio import Rating from .fixture import gateway -from tests import logger - -import karrio -import karrio.lib as lib -import karrio.core.models as models class TestUSPSRating(unittest.TestCase): def setUp(self): self.maxDiff = None - self.RateRequest = models.RateRequest(**RatePayload) + self.RateRequest = RateRequest(**RATE_PAYLOAD) def test_create_rate_request(self): request = gateway.mapper.create_rate_request(self.RateRequest) - logger.debug(request.serialize()) - self.assertEqual(request.serialize(), RateRequest) + self.assertEqual(request.serialize(), RATE_REQUEST_XML) - def test_get_rate(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = "{}" - karrio.Rating.fetch(self.RateRequest).from_(gateway) + @patch("karrio.mappers.usps.proxy.http", return_value="") + def test_get_rates(self, http_mock): + Rating.fetch(self.RateRequest).from_(gateway) - self.assertEqual( - mock.call_args[1]["url"], - f"{gateway.settings.server_url}/v3/total-rates/search", - ) + url = http_mock.call_args[1]["url"] + self.assertEqual( + url, 
f"{gateway.settings.server_url}?{urllib.parse.urlencode(RATE_REQUEST)}" + ) def test_parse_rate_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = RateResponse - parsed_response = ( - karrio.Rating.fetch(self.RateRequest).from_(gateway).parse() - ) - logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual(lib.to_dict(parsed_response), ParsedRateResponse) + with patch("karrio.mappers.usps.proxy.http") as mock: + mock.return_value = RATE_RESPONSE_XML + parsed_response = Rating.fetch(self.RateRequest).from_(gateway).parse() + + self.assertListEqual(DP.to_dict(parsed_response), PARSED_RATE_RESPONSE) + + def test_parse_rate_response_errors(self): + with patch("karrio.mappers.usps.proxy.http") as mock: + mock.return_value = ERROR_XML + parsed_response = Rating.fetch(self.RateRequest).from_(gateway).parse() + + self.assertListEqual(DP.to_dict(parsed_response), PARSED_ERRORS) if __name__ == "__main__": unittest.main() -RatePayload = { - "shipper": { - "company_name": "ABC Corp.", - "address_line1": "1098 N Fraser Street", - "city": "Georgetown", - "postal_code": "29440", - "country_code": "US", - "person_name": "Tall Tom", - "phone_number": "8005554526", - "state_code": "SC", - }, - "recipient": { - "company_name": "Horizon", - "address_line1": "1309 S Agnew Avenue", - "address_line2": "Apt 303", - "city": "Oklahoma City", - "postal_code": "73108", - "country_code": "US", - "person_name": "Lina Smith", - "phone_number": "+1 123 456 7890", - "state_code": "OK", - }, +RATE_PAYLOAD = { + "shipper": {"postal_code": "44106"}, + "recipient": {"postal_code": "20770"}, "parcels": [ { - "height": 50, - "length": 50, - "weight": 20, - "width": 12, - "dimension_unit": "CM", - "weight_unit": "KG", + "width": 5, + "height": 5, + "length": 3, + "weight": 1, + "weight_unit": "LB", + "dimension_unit": "IN", } ], + "services": ["usps_priority"], "options": { - "usps_label_delivery_service": True, - "usps_price_type": "RETAIL", - "shipment_date": "2024-07-28", + "usps_insurance": 55.0, + "usps_signature_confirmation": True, }, - "services": ["usps_parcel_select"], - "reference": "REF-001", } -ParsedRateResponse = [ +PARSED_RATE_RESPONSE = [ [ { "carrier_id": "usps", "carrier_name": "usps", "currency": "USD", - "extra_charges": [ - {"amount": 3.35, "currency": "USD", "name": "Base Charge"}, - {"amount": 3.35, "currency": "USD", "name": "string"}, - {"amount": 3.35, "currency": "USD", "name": "Adult Signature Required"}, - ], - "meta": {"service_name": "usps_parcel_select", "zone": "01"}, - "service": "usps_parcel_select", - "total_charge": 3.35, - } + "meta": {"service_name": "usps_priority_mail_express"}, + "service": "usps_priority_mail_express", + "total_charge": 31.15, + }, + { + "carrier_id": "usps", + "carrier_name": "usps", + "currency": "USD", + "meta": {"service_name": "usps_priority_mail_express_hold_for_pickup"}, + "service": "usps_priority_mail_express_hold_for_pickup", + "total_charge": 31.15, + }, + { + "carrier_id": "usps", + "carrier_name": "usps", + "currency": "USD", + "meta": { + "service_name": "usps_priority_mail_express_sunday_holiday_delivery" + }, + "service": "usps_priority_mail_express_sunday_holiday_delivery", + "total_charge": 43.65, + }, + { + "carrier_id": "usps", + "carrier_name": "usps", + "currency": "USD", + "meta": {"service_name": "usps_priority_mail"}, + "service": "usps_priority_mail", + "total_charge": 8.85, + }, + { + "carrier_id": "usps", + "carrier_name": "usps", + "currency": "USD", + "meta": {"service_name": 
"usps_priority_mail_large_flat_rate_box"}, + "service": "usps_priority_mail_large_flat_rate_box", + "total_charge": 21.9, + }, + { + "carrier_id": "usps", + "carrier_name": "usps", + "currency": "USD", + "meta": {"service_name": "usps_priority_mail_medium_flat_rate_box"}, + "service": "usps_priority_mail_medium_flat_rate_box", + "total_charge": 15.5, + }, + { + "carrier_id": "usps", + "carrier_name": "usps", + "currency": "USD", + "meta": {"service_name": "usps_media_mail"}, + "service": "usps_media_mail", + "total_charge": 3.45, + }, + { + "carrier_id": "usps", + "carrier_name": "usps", + "currency": "USD", + "meta": {"service_name": "usps_library_mail"}, + "service": "usps_library_mail", + "total_charge": 3.28, + }, ], [], ] - -RateRequest = [ - { - "accountNumber": "Your Account Number", - "accountType": "EPS", - "destinationZIPCode": "73108", - "extraServices": [415], - "height": 19.69, - "length": 19.69, - "mailClasses": ["PARCEL_SELECT"], - "mailingDate": "2024-07-28", - "originZIPCode": "29440", - "priceType": "RETAIL", - "weight": 44.1, - "width": 4.72, - } +PARSED_ERRORS = [ + [], + [ + { + "carrier_name": "usps", + "carrier_id": "usps", + "code": "-2147218040", + "message": "Invalid International Mail Type", + } + ], ] -RateResponse = """{ - "rateOptions": [ - { - "totalBasePrice": 3.35, - "rates": [ - { - "SKU": "DPXX0XXXXX07200", - "description": "string", - "priceType": "RETAIL", - "price": 3.35, - "weight": 5, - "dimWeight": 5, - "fees": [ - { - "name": "string", - "SKU": "string", - "price": 0 - } - ], - "startDate": "2021-07-16", - "endDate": "2021-07-16", - "mailClass": "PARCEL_SELECT", - "zone": "01" - } - ], - "extraServices": [ - { - "extraService": "922", - "name": "Adult Signature Required", - "SKU": "DPXX0XXXXX07200", - "priceType": "RETAIL", - "price": 3.35, - "warnings": [ - { - "warningCode": "string", - "warningDescription": "string" - } - ] - } - ], - "totalPrice": 3.35 - } - ] -} +ERROR_XML = """ + + -2147218040 + IntlPostage;clsIntlPostage.CalcAllPostageDimensionsXML;IntlRateV2.ProcessRequest + Invalid International Mail Type + + 1000440 + +""" + +RATE_REQUEST_XML = """ + 2 + + Priority + 44106 + 20770 + 0 + 16 + VARIABLE + 5 + 3 + 5 + + 100 + 108 + + false + + +""" + +RATE_REQUEST = {"API": "RateV4", "XML": RATE_REQUEST_XML} + +RATE_RESPONSE_XML = """ + + + 44106 + 20770 + 1 + 16. 
+ FALSE + 3 + + Priority Mail Express 2-Day&lt;sup&gt;&#8482;&lt;/sup&gt; + 31.15 + 2-Day + + + Priority Mail Express 2-Day&lt;sup&gt;&#8482;&lt;/sup&gt; Hold For Pickup + 31.15 + 2-Day + + + Priority Mail Express 2-Day&lt;sup&gt;&#8482;&lt;/sup&gt; Sunday/Holiday Delivery + 43.65 + 2-Day + + + Priority Mail 2-Day&lt;sup&gt;&#8482;&lt;/sup&gt; + 8.85 + 2-Day + + + Priority Mail 2-Day&lt;sup&gt;&#8482;&lt;/sup&gt; Large Flat Rate Box + 21.90 + 2-Day + + + Priority Mail 2-Day&lt;sup&gt;&#8482;&lt;/sup&gt; Medium Flat Rate Box + 15.50 + 2-Day + + + Media Mail Parcel + 3.45 + + + Library Mail Parcel + 3.28 + + + """ diff --git a/modules/connectors/usps/tests/usps/test_shipment.py b/modules/connectors/usps/tests/usps/test_shipment.py index 98271076fe..58c5438a69 100644 --- a/modules/connectors/usps/tests/usps/test_shipment.py +++ b/modules/connectors/usps/tests/usps/test_shipment.py @@ -1,73 +1,70 @@ import unittest +import urllib.parse from unittest.mock import patch, ANY -from .fixture import gateway -from tests import logger - import karrio -import karrio.lib as lib -import karrio.core.models as models +from karrio.core.utils import DP +from karrio.core.models import ShipmentRequest, ShipmentCancelRequest +from .fixture import gateway -class TestUSPSShipping(unittest.TestCase): +class TestUSPSShipment(unittest.TestCase): def setUp(self): self.maxDiff = None - self.ShipmentRequest = models.ShipmentRequest(**ShipmentPayload) - self.ShipmentCancelRequest = models.ShipmentCancelRequest( - **ShipmentCancelPayload - ) + self.ShipmentRequest = ShipmentRequest(**shipment_data) + self.ShipmentCancelRequest = ShipmentCancelRequest(**shipment_cancel_data) def test_create_shipment_request(self): - request = gateway.mapper.create_shipment_request(self.ShipmentRequest) - logger.debug(request.serialize()) - self.assertEqual(request.serialize(), ShipmentRequest) + requests = gateway.mapper.create_shipment_request(self.ShipmentRequest) + self.assertEqual(requests.serialize(), ShipmentRequestXML) def test_create_cancel_shipment_request(self): - request = gateway.mapper.create_cancel_shipment_request( + requests = gateway.mapper.create_cancel_shipment_request( self.ShipmentCancelRequest ) - logger.debug(request.serialize()) - self.assertEqual(request.serialize(), ShipmentCancelRequest) - - def test_create_shipment(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = "{}" - karrio.Shipment.create(self.ShipmentRequest).from_(gateway) - - self.assertEqual( - mock.call_args[1]["url"], - f"{gateway.settings.server_url}/v3/label", - ) + self.assertEqual(requests.serialize(), ShipmentCancelRequestXML) + + @patch("karrio.mappers.usps.proxy.http", return_value="") + def test_create_shipment(self, http_mock): + karrio.Shipment.create(self.ShipmentRequest).from_(gateway) + + url = http_mock.call_args[1]["url"] + # print(urllib.parse.unquote(url)) + # print(urllib.parse.unquote(f"{gateway.settings.server_url}?{urllib.parse.urlencode(ShipmentRequestQuery)}")) + self.assertEqual( + url, + f"{gateway.settings.server_url}?{urllib.parse.urlencode(ShipmentRequestQuery)}", + ) - def test_cancel_shipment(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = "{}" - karrio.Shipment.cancel(self.ShipmentCancelRequest).from_(gateway) + @patch("karrio.mappers.usps.proxy.http", return_value="") + def test_cancel_shipment(self, http_mock): + karrio.Shipment.cancel(self.ShipmentCancelRequest).from_(gateway) - self.assertEqual( - mock.call_args[1]["url"], - 
f"{gateway.settings.server_url}/v3/label/794947717776", - ) + url = http_mock.call_args[1]["url"] + self.assertEqual( + url, + f"{gateway.settings.server_url}?{urllib.parse.urlencode(ShipmentCancelRequestQuery)}", + ) def test_parse_shipment_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = ShipmentResponse + with patch("karrio.mappers.usps.proxy.http") as mocks: + mocks.return_value = ShipmentResponseXML parsed_response = ( karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() ) - logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual(lib.to_dict(parsed_response), ParsedShipmentResponse) + + self.assertListEqual(DP.to_dict(parsed_response), ParsedShipmentResponse) def test_parse_cancel_shipment_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = ShipmentCancelResponse + with patch("karrio.mappers.usps.proxy.http") as mocks: + mocks.return_value = ShipmentCancelResponseXML parsed_response = ( karrio.Shipment.cancel(self.ShipmentCancelRequest) .from_(gateway) .parse() ) - logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual( - lib.to_dict(parsed_response), ParsedCancelShipmentResponse + + self.assertEqual( + DP.to_dict(parsed_response), DP.to_dict(ParsedShipmentCancelResponse) ) @@ -75,18 +72,12 @@ def test_parse_cancel_shipment_response(self): unittest.main() -ShipmentPayload = { +shipment_cancel_data = { + "shipment_identifier": "123456789012", +} + +shipment_data = { "shipper": { - "company_name": "ABC Corp.", - "address_line1": "1098 N Fraser Street", - "city": "Georgetown", - "postal_code": "29440", - "country_code": "US", - "person_name": "Tall Tom", - "phone_number": "8005554526", - "state_code": "SC", - }, - "recipient": { "company_name": "Horizon", "address_line1": "1309 S Agnew Avenue", "address_line2": "Apt 303", @@ -97,232 +88,150 @@ def test_parse_cancel_shipment_response(self): "phone_number": "+1 123 456 7890", "state_code": "OK", }, + "recipient": { + "company_name": "ABC Corp.", + "address_line1": "1098 N Fraser Street", + "city": "Georgetown", + "postal_code": "29440", + "country_code": "US", + "person_name": "Tall Tom", + "phone_number": "8005554526", + "state_code": "SC", + }, "parcels": [ { - "height": 50, - "length": 50, - "weight": 20, + "height": 9, + "length": 6, "width": 12, + "weight": 20.0, "dimension_unit": "CM", "weight_unit": "KG", + "options": { + "insurance": 75.0, + }, } ], - "service": "carrier_service", - "options": { - "signature_required": True, - "shipment_date": "2024-07-28", - }, - "reference": "#Order 11111", + "service": "usps_priority_mail_express_flat_rate_boxes", + "options": {"shipment_date": "2024-04-02"}, } -ShipmentCancelPayload = { - "shipment_identifier": "794947717776", -} ParsedShipmentResponse = [ { "carrier_id": "usps", "carrier_name": "usps", - "docs": {"invoice": ANY, "label": ANY}, - "label_type": "PDF", + "shipment_identifier": "420063719270110101010XXXXXXXXX", + "tracking_number": "420063719270110101010XXXXXXXXX", + "docs": {"label": ANY}, "meta": { - "SKU": "string", - "labelBrokerID": "string", - "postage": 0, - "routingInformation": "string", + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=420063719270110101010XXXXXXXXX" }, - "shipment_identifier": "string", - "tracking_number": "string", }, [], ] -ParsedCancelShipmentResponse = [ +ParsedShipmentCancelResponse = [ { "carrier_id": "usps", "carrier_name": "usps", - "operation": "Cancel Shipment", + "operation": 
"Shipment Cancel", "success": True, }, [], ] +ShipmentRequestXML = """ + 1 + + 6X4LABEL + + 1 + 1 + + + Lina Smith + Horizon + Apt 303 + 1309 S Agnew Avenue + Oklahoma City + OK + 73108 + + 1234567890 + true + Tall Tom + ABC Corp. + + 1098 N Fraser Street + Georgetown + SC + 29440 + + 8005554526 + true + 705.48 + PRIORITY EXPRESS + VARIABLE + 4.72 + 2.36 + 3.54 + 75 + 04/02/2024 + 847654321 + Lina Smith + Tall Tom + SEPARATE PAGE + PDF + RETURN + + +""" -ShipmentRequest = [ - { - "fromAddress": { - "ZIPPlus4": "29440", - "city": "Georgetown", - "firm": "ABC Corp.", - "firstName": "Tall Tom", - "ignoreBadAddress": True, - "phone": "8005554526", - "streetAddress": "1098 N Fraser Street", - }, - "imageInfo": { - "imageType": "PDF", - "labelType": "4X6LABEL", - "receiptOption": "SEPARATE_PAGE", - }, - "packageDescription": { - "customerReference": [ - {"printReferenceNumber": True, "referenceNumber": "#Order 11111"} - ], - "destinationEntryFacilityType": "NONE", - "dimensionsUOM": "in", - "girth": 124.0, - "height": 19.69, - "inductionZIPCode": "29440", - "length": 19.69, - "mailClass": "carrier_service", - "mailingDate": "2024-07-28", - "processingCategory": "NON_MACHINABLE", - "rateIndicator": "SP", - "weight": 44.1, - "weightUOM": "lb", - "width": 4.72, - }, - "senderAddress": { - "ZIPPlus4": "29440", - "city": "Georgetown", - "firm": "ABC Corp.", - "firstName": "Tall Tom", - "ignoreBadAddress": True, - "phone": "8005554526", - "streetAddress": "1098 N Fraser Street", - }, - "toAddress": { - "ZIPCode": "73108", - "city": "Oklahoma City", - "firm": "Horizon", - "firstName": "Lina Smith", - "ignoreBadAddress": True, - "phone": "+1 123 456 7890", - "secondaryAddress": "Apt 303", - "streetAddress": "1309 S Agnew Avenue", - }, - } -] +ShipmentRequestQuery = {"API": "eVS", "XML": ShipmentRequestXML} -ShipmentCancelRequest = [{"trackingNumber": "794947717776"}] +ShipmentCancelRequestXML = """ + 123456789012 + +""" -ShipmentResponse = """{ - "labelMetadata": { - "labelAddress": { - "streetAddress": "string", - "streetAddressAbbreviation": "string", - "secondaryAddress": "string", - "cityAbbreviation": "string", - "city": "string", - "state": "st", - "ZIPCode": "string", - "ZIPPlus4": "string", - "urbanization": "string", - "firstName": "string", - "lastName": "string", - "firm": "string", - "phone": "string", - "email": "user@example.com", - "ignoreBadAddress": true - }, - "routingInformation": "string", - "trackingNumber": "string", - "constructCode": "string", - "SKU": "string", - "postage": 0, - "extraServices": [ - { - "name": "string", - "SKU": "string", - "price": 0 - } - ], - "zone": "string", - "commitment": { - "name": "string", - "scheduleDeliveryDate": "string" - }, - "weightUOM": "string", - "weight": 0, - "dimensionalWeight": 0, - "fees": [ - { - "name": "string", - "SKU": "string", - "price": 0 - } - ], - "permitHolderName": "string", - "inductionType": {}, - "labelBrokerID": "string", - "links": [ - { - "rel": ["string"], - "title": "string", - "href": "http://example.com", - "method": "GET", - "submissionMediaType": "string", - "targetMediaType": "string" - } - ] - }, - "returnLabelMetadata": { - "labelAddress": { - "streetAddress": "string", - "streetAddressAbbreviation": "string", - "secondaryAddress": "string", - "cityAbbreviation": "string", - "city": "string", - "state": "st", - "ZIPCode": "string", - "ZIPPlus4": "string", - "urbanization": "string", - "firstName": "string", - "lastName": "string", - "firm": "string", - "phone": "string", - "email": "user@example.com", - 
"ignoreBadAddress": true - }, - "routingInformation": "string", - "trackingNumber": "string", - "SKU": "string", - "postage": 0, - "extraServices": [ - { - "name": "string", - "SKU": "string", - "price": 0 - } - ], - "zone": "string", - "weightUOM": "string", - "weight": 0, - "dimensionalWeight": 0, - "fees": [ - { - "name": "string", - "SKU": "string", - "price": 0 - } - ], - "labelBrokerID": "string", - "links": [ - { - "rel": ["string"], - "title": "string", - "href": "http://example.com", - "method": "GET", - "submissionMediaType": "string", - "targetMediaType": "string" - } - ] - }, - "labelImage": "string", - "receiptImage": "string", - "returnLabelImage": "string", - "returnReceiptImage": "string" -} +ShipmentCancelRequestQuery = {"API": "eVSCancel", "XML": ShipmentCancelRequestXML} + +ShipmentResponseXML = """ + 420063719270110101010XXXXXXXXX + SUkqAAgAAAASAP4ABAAB + TALL TOM + ABC CORP. + + 1098 N FRASER ST + GEORGETOWN + SC + 29440 + 2849 + 294402849981 + 0006 + 8.76 + + + 120 + Adult Signature Restricted Delivery + 6.90 + + + 05 + C002 + Not Valid Test Label + ePostage + + + 3-Day + 2020-10-05 + + """ -ShipmentCancelResponse = """{"ok": true}""" +ShipmentCancelResponseXML = """ + 420902109411202901089817001111 + Cancelled + Order Cancelled Successfully + +""" diff --git a/modules/connectors/usps/tests/usps/test_tracking.py b/modules/connectors/usps/tests/usps/test_tracking.py index 87dac33361..88eaa6cf95 100644 --- a/modules/connectors/usps/tests/usps/test_tracking.py +++ b/modules/connectors/usps/tests/usps/test_tracking.py @@ -1,225 +1,147 @@ import unittest -from unittest.mock import patch, ANY +from unittest.mock import patch from .fixture import gateway -from tests import logger - -import karrio -import karrio.lib as lib -import karrio.core.models as models +from karrio.core.utils import DP +from karrio.core.models import TrackingRequest +from karrio import Tracking class TestUSPSTracking(unittest.TestCase): def setUp(self): self.maxDiff = None - self.TrackingRequest = models.TrackingRequest(**TrackingPayload) + self.TrackingRequest = TrackingRequest(tracking_numbers=TRACKING_PAYLOAD) def test_create_tracking_request(self): request = gateway.mapper.create_tracking_request(self.TrackingRequest) - logger.debug(request.serialize()) - self.assertEqual(request.serialize(), TrackingRequest) - - def test_get_tracking(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = "{}" - karrio.Tracking.fetch(self.TrackingRequest).from_(gateway) - - self.assertEqual( - mock.call_args[1]["url"], - f"{gateway.settings.server_url}/v3/tracking/89108749065090", - ) + self.assertEqual(request.serialize(), TRACKING_REQUEST) def test_parse_tracking_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = TrackingResponse + with patch("karrio.mappers.usps.proxy.http") as mock: + mock.return_value = TRACKING_RESPONSE parsed_response = ( - karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() + Tracking.fetch(self.TrackingRequest).from_(gateway).parse() ) - logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual(lib.to_dict(parsed_response), ParsedTrackingResponse) - def test_parse_error_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: - mock.return_value = ErrorResponse - parsed_response = ( - karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() - ) - logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual(lib.to_dict(parsed_response), 
ParsedErrorResponse) + self.assertListEqual(DP.to_dict(parsed_response), PARSED_TRACKING_RESPONSE) if __name__ == "__main__": unittest.main() -TrackingPayload = { - "tracking_numbers": ["89108749065090"], -} +TRACKING_PAYLOAD = ["XXXXXXXXXXXX1"] -ParsedTrackingResponse = [ +PARSED_TRACKING_RESPONSE = [ [ { "carrier_id": "usps", "carrier_name": "usps", "delivered": False, - "estimated_delivery": "2019-08-24", "events": [ { - "code": "string", - "date": "2019-08-24", - "description": "string", - "location": "string, string, string, string", - "time": "14:15 PM", - } + "code": "10", + "date": "2016-01-06", + "description": "Arrived at USPS Facility", + "location": "COLUMBUS, OH, 43218", + "time": "10:45 AM", + }, + { + "code": "03", + "date": "2016-01-06", + "description": "Acceptance", + "location": "LAKE CHARLES, IL, 12345", + "time": "09:10 AM", + }, ], + "tracking_number": "XXXXXXXXXX1", "info": { - "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=string", - "expected_delivery": "2019-08-24", - "shipment_destination_country": "string", - "shipment_destination_postal_code": "string", - "shipment_origin_country": "st", - "shipment_origin_postal_code": "strin", - "shipment_service": "string", + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=XXXXXXXXXX1", + "shipment_destination_postal_code": 12345, + "shipment_origin_postal_code": "12345", + "shipment_service": "First-Class Package Service - Retail", }, "status": "in_transit", - "tracking_number": "string", } ], [], ] -ParsedErrorResponse = [ - [], - [ - { - "carrier_id": "usps", - "carrier_name": "usps", - "code": "string", - "details": { - "errors": [ - { - "code": "string", - "detail": "string", - "source": {"example": "string", "parameter": "string"}, - "status": "string", - "title": "string", - } - ], - "tracking_number": "89108749065090", - }, - "message": "string", - } - ], -] - -TrackingRequest = ["89108749065090"] - -TrackingResponse = """{ - "trackingNumber": "string", - "additionalInfo": "string", - "ADPScripting": "string", - "archiveRestoreInfo": "string", - "associatedLabel": "string", - "carrierRelease": true, - "mailClass": "BOUND_PRINTED_MATTER", - "destinationCity": "string", - "destinationCountryCode": "string", - "destinationState": "st", - "destinationZIP": "string", - "editedLabelId": "string", - "emailEnabled": true, - "endOfDay": "string", - "eSOFEligible": true, - "expectedDeliveryTimeStamp": "2019-08-24T14:15:22Z", - "expectedDeliveryType": "string", - "guaranteedDeliveryTimeStamp": "2019-08-24T14:15:22Z", - "guaranteedDetails": "string", - "itemShape": "LETTER", - "kahalaIndicator": true, - "mailType": "INTERNATIONAL_INBOUND", - "approximateIntakeDate": "string", - "uniqueTrackingId": "string", - "onTime": true, - "originCity": "string", - "originCountry": "st", - "originState": "str", - "originZIP": "strin", - "proofOfDeliveryEnabled": true, - "predictedDeliveryTimeStamp": "2019-08-24T14:15:22Z", - "predictedDeliveryDate": "2019-08-24", - "predictedDeliveryWindowStartTime": "string", - "predictedDeliveryWindowEndTime": "string", - "relatedReturnReceiptID": "string", - "redeliveryEnabled": true, - "enabledNotificationRequests": { - "SMS": { - "futureDelivery": true, - "alertDelivery": true, - "todayDelivery": true, - "UP": true, - "DND": true - }, - "EMail": { - "futureDelivery": true, - "alertDelivery": true, - "todayDelivery": true, - "UP": true, - "DND": true, - "firstDisplayable": true, - "otherActivity": true - } - }, - "restoreEnabled": true, - 
"returnDateNotice": "2019-08-24", - "RRAMenabled": true, - "RREEnabled": true, - "services": ["string"], - "serviceTypeCode": "string", - "status": "string", - "statusCategory": "string", - "statusSummary": "Your item was delivered at 12:55 pm on April 05, 2010 in FALMOUTH, MA 02540", - "trackingProofOfDeliveryEnabled": true, - "valueofArticle": "string", - "extendRetentionPurchasedCode": "string", - "extendRetentionExtraServiceCodeOptions": [{}], - "trackingEvents": [ - { - "eventType": "string", - "eventTimestamp": "2019-08-24T14:15:22Z", - "GMTTimestamp": "2024-04-04T14:03:12.041Z", - "GMTOffset": "-7:00", - "eventCountry": "string", - "eventCity": "string", - "eventState": "string", - "eventZIP": "string", - "firm": "string", - "name": "string", - "authorizedAgent": true, - "eventCode": "string", - "actionCode": "string", - "reasonCode": "string" - } - ] -} +TRACKING_REQUEST = f""" + 1 + 127.0.0.1 + Karrio + + """ -ErrorResponse = """{ - "apiVersion": "string", - "error": { - "code": "string", - "message": "string", - "errors": [ - { - "status": "string", - "code": "string", - "title": "string", - "detail": "string", - "source": { - "parameter": "string", - "example": "string" - } - } - ] - } -} +TRACKING_RESPONSE = """ + + + First-Class Package Service - Retail + BP + KBEA + TX + 12345 + true + false + DM + 2016-01-08 10:34:04.000000 + 412725500 + LAKE CHARLES + IL + 12345 + false + false + false + false + USPS Tracking<SUP>&#174;</SUP> + 346 + Arrived at facility + In Transit + Your item arrived at our USPS facility in COLUMBUS, OH 43218 on January 6, 2016 at 10:45 pm. The item is currently in transit to the destination. + T + true + + + true + true + true + true + true + true + true + + + true + true + true + true + true + true + true + + + + 10:45 pm + January 6, 2016 + Arrived at USPS Facility + COLUMBUS + OH + 43218 + false + 10 + + + 9:10 am + January 6, 2016 + Acceptance + LAKE CHARLES + IL + 12345 + false + 03 + + + """ diff --git a/modules/connectors/usps_international/README.md b/modules/connectors/usps_international/README.md index 39dc9b3340..cde0567e86 100644 --- a/modules/connectors/usps_international/README.md +++ b/modules/connectors/usps_international/README.md @@ -1,7 +1,6 @@ +# karrio.usps_international -# karrio.usps - -This package is a USPS extension of the [karrio](https://pypi.org/project/karrio) multi carrier shipping SDK. +This package is a USPS International extension of the [karrio](https://pypi.org/project/karrio) multi carrier shipping SDK. ## Requirements @@ -10,7 +9,7 @@ This package is a USPS extension of the [karrio](https://pypi.org/project/karrio ## Installation ```bash -pip install karrio.usps +pip install karrio.usps_international ``` ## Usage @@ -21,7 +20,7 @@ from karrio.mappers.usps_international.settings import Settings # Initialize a carrier gateway -usps = karrio.gateway["usps"].create( +canadapost = karrio.gateway["usps"].create( Settings( ... ) diff --git a/modules/connectors/usps_international/generate b/modules/connectors/usps_international/generate index 65ac0f6376..ffe33b1c62 100755 --- a/modules/connectors/usps_international/generate +++ b/modules/connectors/usps_international/generate @@ -1,24 +1,68 @@ SCHEMAS=./schemas LIB_MODULES=./karrio/schemas/usps_international -find "${LIB_MODULES}" -name "*.py" -exec rm -r {} \; -touch "${LIB_MODULES}/__init__.py" +mkdir -p $LIB_MODULES +find $LIB_MODULES -name "*.py" -exec rm -r {} \; +touch $LIB_MODULES/__init__.py -quicktype() { - echo "Generating $1..." 
- docker run -it --rm --name quicktype -v $PWD:/app -e SCHEMAS=/app/schemas -e LIB_MODULES=/app/karrio/schemas/usps \ - karrio/tools /quicktype/script/quicktype --no-uuids --no-date-times --no-enums --src-lang json --lang jstruct \ - --no-nice-property-names --all-properties-optional --type-as-suffix $@ -} - -quicktype --src="${SCHEMAS}/error_response.json" --out="${LIB_MODULES}/error_response.py" -quicktype --src="${SCHEMAS}/label_request.json" --out="${LIB_MODULES}/label_request.py" -quicktype --src="${SCHEMAS}/label_response.json" --out="${LIB_MODULES}/label_response.py" -quicktype --src="${SCHEMAS}/pickup_request.json" --out="${LIB_MODULES}/pickup_request.py" -quicktype --src="${SCHEMAS}/pickup_response.json" --out="${LIB_MODULES}/pickup_response.py" -quicktype --src="${SCHEMAS}/pickup_update_request.json" --out="${LIB_MODULES}/pickup_update_request.py" -quicktype --src="${SCHEMAS}/pickup_update_response.json" --out="${LIB_MODULES}/pickup_update_response.py" -quicktype --src="${SCHEMAS}/rate_request.json" --out="${LIB_MODULES}/rate_request.py" -quicktype --src="${SCHEMAS}/rate_response.json" --out="${LIB_MODULES}/rate_response.py" -quicktype --src="${SCHEMAS}/scan_form_request.json" --out="${LIB_MODULES}/scan_form_request.py" -quicktype --src="${SCHEMAS}/scan_form_response.json" --out="${LIB_MODULES}/scan_form_response.py" -quicktype --src="${SCHEMAS}/tracking_response.json" --out="${LIB_MODULES}/tracking_response.py" +generateDS --no-namespace-defs -o "$LIB_MODULES/address_validate_request.py" $SCHEMAS/AddressValidateRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/address_validate_response.py" $SCHEMAS/AddressValidateResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_availability_request.py" $SCHEMAS/CarrierPickupAvailabilityRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_availability_response.py" $SCHEMAS/CarrierPickupAvailabilityResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_cancel_request.py" $SCHEMAS/CarrierPickupCancelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_cancel_response.py" $SCHEMAS/CarrierPickupCancelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_change_request.py" $SCHEMAS/CarrierPickupChangeRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_change_response.py" $SCHEMAS/CarrierPickupChangeResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_inquiry_request.py" $SCHEMAS/CarrierPickupInquiryRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_inquiry_response.py" $SCHEMAS/CarrierPickupInquiryResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_schedule_request.py" $SCHEMAS/CarrierPickupScheduleRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/carrier_pickup_schedule_response.py" $SCHEMAS/CarrierPickupScheduleResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/city_state_lookup_request.py" $SCHEMAS/CityStateLookupRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/city_state_lookup_response.py" $SCHEMAS/CityStateLookupResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/emrsv4_0_bulk_request.py" $SCHEMAS/EMRSV4.0BulkRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/error.py" $SCHEMAS/Error.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/express_mail_commitment_request.py" $SCHEMAS/ExpressMailCommitmentRequest.xsd +generateDS --no-namespace-defs -o 
"$LIB_MODULES/express_mail_commitment_response.py" $SCHEMAS/ExpressMailCommitmentResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/first_class_mail_request.py" $SCHEMAS/FirstClassMailRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/first_class_mail_response.py" $SCHEMAS/FirstClassMailResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/hfp_facility_info_request.py" $SCHEMAS/HFPFacilityInfoRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/hfp_facility_info_response.py" $SCHEMAS/HFPFacilityInfoResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/intl_rate_v2_request.py" $SCHEMAS/IntlRateV2Request.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/intl_rate_v2_response.py" $SCHEMAS/IntlRateV2Response.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/mrsv4_0_request.py" $SCHEMAS/MRSV4.0Request.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/pts_emailresult.py" $SCHEMAS/PTSEmailResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/pts_email_request.py" $SCHEMAS/PTSEmailRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptspod_result.py" $SCHEMAS/PTSPODResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptspod_request.py" $SCHEMAS/PTSPodRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptsrre_result.py" $SCHEMAS/PTSRREResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptsrre_request.py" $SCHEMAS/PTSRreRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptstpod_result.py" $SCHEMAS/PTSTPODResult.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/ptstpod_request.py" $SCHEMAS/PTSTPodRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/priority_mail_request.py" $SCHEMAS/PriorityMailRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/priority_mail_response.py" $SCHEMAS/PriorityMailResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/rate_v4_request.py" $SCHEMAS/RateV4Request.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/rate_v4_response.py" $SCHEMAS/RateV4Response.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/scan_request.py" $SCHEMAS/SCANRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/scan_response.py" $SCHEMAS/SCANResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/sdc_get_locations_request.py" $SCHEMAS/SDCGetLocationsRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/sdc_get_locations_response.py" $SCHEMAS/SDCGetLocationsResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/standard_b_request.py" $SCHEMAS/StandardBRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/standard_b_response.py" $SCHEMAS/StandardBResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/track_field_request.py" $SCHEMAS/TrackFieldRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/track_request.py" $SCHEMAS/TrackRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/track_response.py" $SCHEMAS/TrackResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/zip_code_lookup_request.py" $SCHEMAS/ZipCodeLookupRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/zip_code_lookup_response.py" $SCHEMAS/ZipCodeLookupResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_cancel_request.py" $SCHEMAS/eVSCancelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_cancel_response.py" $SCHEMAS/eVSCancelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_request.py" $SCHEMAS/eVSRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_response.py" 
$SCHEMAS/eVSResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_express_mail_intl_request.py" $SCHEMAS/eVSExpressMailIntlRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_express_mail_intl_response.py" $SCHEMAS/eVSExpressMailIntlResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_first_class_mail_intl_request.py" $SCHEMAS/eVSFirstClassMailIntlRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_first_class_mail_intl_response.py" $SCHEMAS/eVSFirstClassMailIntlResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_gxg_get_label_request.py" $SCHEMAS/eVSGXGGetLabelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_gxg_get_label_response.py" $SCHEMAS/eVSGXGGetLabelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evsi_cancel_request.py" $SCHEMAS/eVSICancelRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evsi_cancel_response.py" $SCHEMAS/eVSICancelResponse.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_priority_mail_intl_request.py" $SCHEMAS/eVSPriorityMailIntlRequest.xsd +generateDS --no-namespace-defs -o "$LIB_MODULES/evs_priority_mail_intl_response.py" $SCHEMAS/eVSPriorityMailIntlResponse.xsd diff --git a/modules/connectors/usps_international/karrio/mappers/usps_international/__init__.py b/modules/connectors/usps_international/karrio/mappers/usps_international/__init__.py index 03077a9c94..53feec98d2 100644 --- a/modules/connectors/usps_international/karrio/mappers/usps_international/__init__.py +++ b/modules/connectors/usps_international/karrio/mappers/usps_international/__init__.py @@ -4,19 +4,16 @@ from karrio.mappers.usps_international.proxy import Proxy from karrio.mappers.usps_international.settings import Settings import karrio.providers.usps_international.units as units -import karrio.providers.usps_international.utils as utils METADATA = Metadata( id="usps_international", - label="USPS International", + label="USPS Web Tools International", # Integrations Mapper=Mapper, Proxy=Proxy, Settings=Settings, # Data Units - is_hub=False, - options=units.ShippingOption, services=units.ShippingService, - connection_configs=utils.ConnectionConfig, + options=units.ShippingOption, ) diff --git a/modules/connectors/usps_international/karrio/mappers/usps_international/mapper.py b/modules/connectors/usps_international/karrio/mappers/usps_international/mapper.py index 4cd1236375..01425b2f63 100644 --- a/modules/connectors/usps_international/karrio/mappers/usps_international/mapper.py +++ b/modules/connectors/usps_international/karrio/mappers/usps_international/mapper.py @@ -1,88 +1,103 @@ -"""Karrio USPS client mapper.""" - -import typing -import karrio.lib as lib -import karrio.api.mapper as mapper -import karrio.core.models as models -import karrio.providers.usps_international as provider -import karrio.mappers.usps_international.settings as provider_settings - - -class Mapper(mapper.Mapper): - settings: provider_settings.Settings - - def create_rate_request(self, payload: models.RateRequest) -> lib.Serializable: - return provider.rate_request(payload, self.settings) - - def create_tracking_request( - self, payload: models.TrackingRequest - ) -> lib.Serializable: - return provider.tracking_request(payload, self.settings) - - def create_shipment_request( - self, payload: models.ShipmentRequest - ) -> lib.Serializable: - return provider.shipment_request(payload, self.settings) - - def create_pickup_request(self, payload: models.PickupRequest) -> lib.Serializable: - return 
provider.pickup_request(payload, self.settings) - - def create_pickup_update_request( - self, payload: models.PickupUpdateRequest - ) -> lib.Serializable: - return provider.pickup_update_request(payload, self.settings) - - def create_cancel_pickup_request( - self, payload: models.PickupCancelRequest - ) -> lib.Serializable: - return provider.pickup_cancel_request(payload, self.settings) +from typing import List, Tuple +from karrio.core.utils.serializable import Serializable, Deserializable +from karrio.api.mapper import Mapper as BaseMapper +from karrio.core.models import ( + ShipmentCancelRequest, + # PickupUpdateRequest, + # PickupCancelRequest, + ShipmentRequest, + TrackingRequest, + # PickupRequest, + RateRequest, + ConfirmationDetails, + TrackingDetails, + ShipmentDetails, + # PickupDetails, + RateDetails, + Message, +) +from karrio.providers.usps_international import ( + parse_shipment_cancel_response, + # parse_pickup_update_response, + # parse_pickup_cancel_response, + parse_shipment_response, + parse_tracking_response, + # parse_pickup_response, + parse_rate_response, + shipment_cancel_request, + # pickup_update_request, + # pickup_cancel_request, + tracking_request, + shipment_request, + # pickup_request, + rate_request, +) +from karrio.mappers.usps_international.settings import Settings + + +class Mapper(BaseMapper): + settings: Settings + + def create_rate_request(self, payload: RateRequest) -> Serializable: + return rate_request(payload, self.settings) + + def create_tracking_request(self, payload: TrackingRequest) -> Serializable: + return tracking_request(payload, self.settings) + + def create_shipment_request(self, payload: ShipmentRequest) -> Serializable: + return shipment_request(payload, self.settings) + + # def create_pickup_request( + # self, payload: PickupRequest + # ) -> Serializable: + # return pickup_request(payload, self.settings) + # + # def create_pickup_update_request( + # self, payload: PickupUpdateRequest + # ) -> Serializable: + # return pickup_update_request(payload, self.settings) + # + # def create_cancel_pickup_request( + # self, payload: PickupCancelRequest + # ) -> Serializable: + # return pickup_cancel_request(payload, self.settings) def create_cancel_shipment_request( - self, payload: models.ShipmentCancelRequest - ) -> lib.Serializable[str]: - return provider.shipment_cancel_request(payload, self.settings) - - def create_manifest_request( - self, payload: models.ManifestRequest - ) -> lib.Serializable: - return provider.manifest_request(payload, self.settings) + self, payload: ShipmentCancelRequest + ) -> Serializable: + return shipment_cancel_request(payload, self.settings) - def parse_cancel_pickup_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: - return provider.parse_pickup_cancel_response(response, self.settings) + # def parse_cancel_pickup_response( + # self, response: Deserializable + # ) -> Tuple[ConfirmationDetails, List[Message]]: + # return parse_pickup_cancel_response(response, self.settings) def parse_cancel_shipment_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: - return provider.parse_shipment_cancel_response(response, self.settings) - - def parse_pickup_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: - return provider.parse_pickup_response(response, self.settings) - - def 
parse_pickup_update_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: - return provider.parse_pickup_update_response(response, self.settings) + self, response: Deserializable + ) -> Tuple[ConfirmationDetails, List[Message]]: + return parse_shipment_cancel_response(response, self.settings) + + # def parse_pickup_response( + # self, response: Deserializable + # ) -> Tuple[PickupDetails, List[Message]]: + # return parse_pickup_response(response, self.settings) + # + # def parse_pickup_update_response( + # self, response: Deserializable + # ) -> Tuple[PickupDetails, List[Message]]: + # return parse_pickup_update_response(response, self.settings) def parse_rate_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - return provider.parse_rate_response(response, self.settings) + self, response: Deserializable + ) -> Tuple[List[RateDetails], List[Message]]: + return parse_rate_response(response, self.settings) def parse_shipment_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: - return provider.parse_shipment_response(response, self.settings) + self, response: Deserializable + ) -> Tuple[ShipmentDetails, List[Message]]: + return parse_shipment_response(response, self.settings) def parse_tracking_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: - return provider.parse_tracking_response(response, self.settings) - - def parse_manifest_response( - self, response: lib.Deserializable[str] - ) -> typing.Tuple[models.ManifestDetails, typing.List[models.Message]]: - return provider.parse_manifest_response(response, self.settings) + self, response: Deserializable + ) -> Tuple[List[TrackingDetails], List[Message]]: + return parse_tracking_response(response, self.settings) diff --git a/modules/connectors/usps_international/karrio/mappers/usps_international/proxy.py b/modules/connectors/usps_international/karrio/mappers/usps_international/proxy.py index 9688772795..6fb13a681a 100644 --- a/modules/connectors/usps_international/karrio/mappers/usps_international/proxy.py +++ b/modules/connectors/usps_international/karrio/mappers/usps_international/proxy.py @@ -1,151 +1,60 @@ -"""Karrio USPS client proxy.""" +import urllib.parse +from karrio.schemas.usps.track_field_request import TrackFieldRequest -import karrio.lib as lib -import karrio.api.proxy as proxy -import karrio.mappers.usps_international.settings as provider_settings +from karrio.api.proxy import Proxy as BaseProxy +from karrio.core.utils import Serializable, Deserializable, XP, request as http +from karrio.mappers.usps_international.settings import Settings -class Proxy(proxy.Proxy): - settings: provider_settings.Settings +class Proxy(BaseProxy): + settings: Settings - def get_rates(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.run_asynchronously( - lambda _: lib.request( - url=f"{self.settings.server_url}/v3/total-rates/search", - data=lib.to_json(_), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ), - request.serialize(), - ) - - return lib.Deserializable(response, lambda _: [lib.to_dict(_) for _ in _]) - - def create_shipment(self, request: lib.Serializable) -> lib.Deserializable[str]: 
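        # Illustrative sketch (not taken verbatim from either side of this hunk): the restored
        # Web Tools proxy below sends each call as a GET whose query string carries the API name
        # plus the url-encoded XML payload, assuming `settings.server_url` points at the Web Tools
        # endpoint:
        #
        #   import urllib.parse
        #   query = urllib.parse.urlencode({"API": "IntlRateV2", "XML": request.serialize()})
        #   url = f"{self.settings.server_url}?{query}"
        #   response = http(url=url, trace=self.trace_as("xml"), method="GET")
        #
        # The same construction is reused for shipment creation and cancellation, with the API
        # name derived from the request class and suffixed with "Certify" in test mode.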
- response = lib.run_asynchronously( - lambda _: lib.request( - url=f"{self.settings.server_url}/v3/international-label", - data=lib.to_json(_), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ), - request.serialize(), - ) + """ Proxy interface method implementations """ - return lib.Deserializable( - response, - lambda _: [lib.to_dict(_) for _ in _], - request.ctx, + def get_tracking(self, request: Serializable) -> Deserializable: + query = urllib.parse.urlencode({"API": "TrackV2", "XML": request.serialize()}) + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - def cancel_shipment(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.run_asynchronously( - lambda _: ( - _["trackingNumber"], - lib.request( - url=f"{self.settings.server_url}/v3/international-label/{_['trackingNumber']}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - on_ok=lambda _: '{"ok": true}', - ), - ), - request.serialize(), - ) - - return lib.Deserializable( - response, - lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], - ) + return Deserializable(response, XP.to_xml) - def get_tracking(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.run_asynchronously( - lambda trackingNumber: ( - trackingNumber, - lib.request( - url=f"{self.settings.server_url}/v3/tracking/{trackingNumber}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ), - ), - request.serialize(), + def get_rates(self, request: Serializable) -> Deserializable: + query = urllib.parse.urlencode( + {"API": "IntlRateV2", "XML": request.serialize()} ) - - return lib.Deserializable( - response, - lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], - ) - - def schedule_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/carrier-pickup", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, - ) - - return lib.Deserializable(response, lib.to_dict) - - def modify_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/carrier-pickup/{request.ctx['confirmationNumber']}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - return lib.Deserializable(response, lib.to_dict) + return Deserializable(response, XP.to_xml) - def cancel_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/carrier-pickup/{request.serialize()['confirmationNumber']}", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": 
f"Bearer {self.settings.access_token}", - }, - on_ok=lambda _: '{"ok": true}', + def create_shipment(self, request: Serializable) -> Deserializable: + tag = request.value.__class__.__name__.replace("Request", "") + api = f"{tag}Certify" if self.settings.test_mode else tag + serialized_request = request.serialize().replace(tag, api) + query = urllib.parse.urlencode({"API": api, "XML": serialized_request}) + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - return lib.Deserializable(response, lib.to_dict) + return Deserializable(response, XP.to_xml) - def create_manifest(self, request: lib.Serializable) -> lib.Deserializable[str]: - response = lib.request( - url=f"{self.settings.server_url}/v3/scan-form", - data=lib.to_json(request.serialize()), - trace=self.trace_as("json"), - method="POST", - headers={ - "Content-Type": "application/json", - "Authorization": f"Bearer {self.settings.access_token}", - }, + def cancel_shipment(self, request: Serializable) -> Deserializable: + tag = request.value.__class__.__name__.replace("Request", "") + api = f"{tag}Certify" if self.settings.test_mode else tag + serialized_request = request.serialize().replace(tag, api) + query = urllib.parse.urlencode({"API": api, "XML": serialized_request}) + response = http( + url=f"{self.settings.server_url}?{query}", + trace=self.trace_as("xml"), + method="GET", ) - return lib.Deserializable(response, lib.to_dict) + return Deserializable(response, XP.to_xml) diff --git a/modules/connectors/usps_international/karrio/mappers/usps_international/settings.py b/modules/connectors/usps_international/karrio/mappers/usps_international/settings.py index 19996a0b81..a5853a8bb8 100644 --- a/modules/connectors/usps_international/karrio/mappers/usps_international/settings.py +++ b/modules/connectors/usps_international/karrio/mappers/usps_international/settings.py @@ -1,20 +1,21 @@ -"""Karrio USPS client settings.""" +"""Karrio USPS International client settings.""" import attr -import karrio.providers.usps_international.utils as provider_utils +from karrio.providers.usps_international.utils import Settings as BaseSettings @attr.s(auto_attribs=True) -class Settings(provider_utils.Settings): - """USPS connection settings.""" +class Settings(BaseSettings): + """USPS International connection settings.""" - # Add carrier specific API connection properties here - client_id: str - client_secret: str - account_type: str = None - account_number: str = None + # Carrier specific properties + username: str + password: str + mailer_id: str = None + customer_registration_id: str = None + logistics_manager_mailer_id: str = None - # generic properties + # Base properties id: str = None test_mode: bool = False carrier_id: str = "usps_international" diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/__init__.py b/modules/connectors/usps_international/karrio/providers/usps_international/__init__.py index 1d68fe009a..86c2be5cac 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/__init__.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/__init__.py @@ -1,5 +1,3 @@ -"""Karrio USPS provider imports.""" - from karrio.providers.usps_international.utils import Settings from karrio.providers.usps_international.rate import parse_rate_response, rate_request from karrio.providers.usps_international.shipment import ( @@ -8,19 +6,15 @@ shipment_cancel_request, shipment_request, ) -from 
karrio.providers.usps_international.pickup import ( - parse_pickup_cancel_response, - parse_pickup_update_response, - parse_pickup_response, - pickup_update_request, - pickup_cancel_request, - pickup_request, -) +# from karrio.providers.usps_international.pickup import ( +# parse_pickup_cancel_response, +# parse_pickup_update_response, +# parse_pickup_response, +# pickup_update_request, +# pickup_cancel_request, +# pickup_request, +# ) from karrio.providers.usps_international.tracking import ( parse_tracking_response, tracking_request, ) -from karrio.providers.usps_international.manifest import ( - parse_manifest_response, - manifest_request, -) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/error.py b/modules/connectors/usps_international/karrio/providers/usps_international/error.py index 54f5ce4aa6..d1de1fd05c 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/error.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/error.py @@ -1,26 +1,24 @@ -"""Karrio USPS error parser.""" +from typing import List +from karrio.schemas.usps.error import Error +from karrio.core.utils import Element, XP +from karrio.core.models import Message +from karrio.providers.usps_international.utils import Settings -import typing -import karrio.lib as lib -import karrio.core.models as models -import karrio.providers.usps_international.utils as provider_utils - -def parse_error_response( - response: typing.Union[dict, typing.List[dict]], - settings: provider_utils.Settings, - **kwargs, -) -> typing.List[models.Message]: - responses = response if isinstance(response, list) else [response] - errors: list = [response["error"] for response in responses if "error" in response] +def parse_error_response(response: Element, settings: Settings) -> List[Message]: + error_nodes = ( + [response] + if response.tag == "Error" + else response.xpath(".//*[local-name() = $name]", name="Error") + ) + errors = [XP.to_object(Error, node) for node in error_nodes] return [ - models.Message( - carrier_id=settings.carrier_id, + Message( carrier_name=settings.carrier_name, - code=error.get("code"), - message=error.get("message"), - details={**kwargs, "errors": error.get("errors", [])}, + carrier_id=settings.carrier_id, + code=str(error.Number), + message=error.Description, ) for error in errors ] diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/__init__.py b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/__init__.py index caf2749e46..79c067b324 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/__init__.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/__init__.py @@ -1,12 +1,3 @@ -from karrio.providers.usps_international.pickup.create import ( - parse_pickup_response, - pickup_request, -) -from karrio.providers.usps_international.pickup.update import ( - parse_pickup_update_response, - pickup_update_request, -) -from karrio.providers.usps_international.pickup.cancel import ( - parse_pickup_cancel_response, - pickup_cancel_request, -) +from karrio.providers.usps_international.pickup.create import parse_pickup_response, pickup_request +from karrio.providers.usps_international.pickup.update import parse_pickup_update_response, pickup_update_request +from karrio.providers.usps_international.pickup.cancel import parse_pickup_cancel_response, pickup_cancel_request diff --git 
a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/cancel.py b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/cancel.py index 440edd53d9..ed5cde8b69 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/cancel.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/cancel.py @@ -1,40 +1,50 @@ -import typing +from typing import Tuple, List +from karrio.schemas.usps.carrier_pickup_cancel_request import CarrierPickupCancelRequest +from karrio.core.utils import Serializable, SF +from karrio.core.models import PickupCancelRequest, ConfirmationDetails, Message + +from karrio.providers.usps_international.error import parse_error_response +from karrio.providers.usps_international.utils import Settings import karrio.lib as lib -import karrio.core.units as units -import karrio.core.models as models -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils -import karrio.providers.usps_international.units as provider_units def parse_pickup_cancel_response( _response: lib.Deserializable[dict], - settings: provider_utils.Settings, -) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + settings: Settings, +) -> Tuple[ConfirmationDetails, List[Message]]: response = _response.deserialize() - messages = error.parse_error_response(response, settings) - success = response.get("ok") == True - - confirmation = ( - models.ConfirmationDetails( + errors = parse_error_response(response, settings) + details = ( + ConfirmationDetails( carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, - operation="Cancel Pickup", - success=success, + success=True, + operation="Pickup Cancel", ) - if success + if not any(errors) else None ) - return confirmation, messages + return details, errors def pickup_cancel_request( - payload: models.PickupCancelRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - - # map data to convert karrio model to usps specific type - request = dict(confirmationNumber=payload.confirmation_number) + payload: PickupCancelRequest, settings: Settings +) -> Serializable: + request = CarrierPickupCancelRequest( + UserID=settings.username, + PASSWORD=settings.password, + FirmName=payload.address.company_name, + SuiteOrApt=payload.address.address_line1, + Address2=SF.concat_str( + payload.address.address_line1, payload.address.address_line2, join=True + ), + Urbanization=None, + City=payload.address.city, + State=payload.address.state_code, + ZIP5=payload.address.postal_code, + ZIP4=None, + ConfirmationNumber=payload.confirmation_number, + ) - return lib.Serializable(request, lib.to_dict) + return Serializable(request) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/create.py b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/create.py index 508fc58708..6c0fe7e648 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/create.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/create.py @@ -1,102 +1,61 @@ -"""Karrio USPS schedule pickup implementation.""" - -import karrio.schemas.usps_international.pickup_request as usps -import karrio.schemas.usps_international.pickup_response as pickup - -import typing +from typing import Tuple, List +from karrio.schemas.usps.carrier_pickup_schedule_request 
import ( + CarrierPickupScheduleRequest, + PackageType, +) +from karrio.core.utils import Serializable, SF +from karrio.core.units import Packages +from karrio.core.models import ( + ShipmentRequest, + PickupRequest, + PickupDetails, + Message, +) + +from karrio.providers.usps_international.error import parse_error_response +from karrio.providers.usps_international.utils import Settings import karrio.lib as lib -import karrio.core.units as units -import karrio.core.models as models -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils -import karrio.providers.usps_international.units as provider_units def parse_pickup_response( - _response: lib.Deserializable[dict], - settings: provider_utils.Settings, -) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - response = _response.deserialize() - - messages = error.parse_error_response(response, settings) - pickup = ( - _extract_details(response, settings) - if "confirmationNumber" in response - else None - ) - - return pickup, messages - - -def _extract_details( - data: dict, - settings: provider_utils.Settings, -) -> models.PickupDetails: - details = lib.to_object(pickup.PickupResponseType, data) - - return models.PickupDetails( - carrier_id=settings.carrier_id, - carrier_name=settings.carrier_name, - confirmation_number=details.confirmationNumber, - pickup_date=lib.fdate(details.pickupDate), - ) - - -def pickup_request( - payload: models.PickupRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - address = lib.to_address(payload.address) - packages = lib.to_packages(payload.parcels) - options = lib.units.Options( - payload.options, - option_type=lib.units.create_enum( - "PickupOptions", - # fmt: off - { - "usps_package_type": lib.OptionEnum("usps_package_type"), - }, - # fmt: on + response: lib.Deserializable[dict], + settings: Settings, +) -> Tuple[PickupDetails, List[Message]]: + errors = parse_error_response(response.deserialize(), settings) + details = None + + return details, errors + + +def pickup_request(payload: PickupRequest, settings: Settings) -> Serializable: + shipments: List[ShipmentRequest] = payload.options.get("shipments", []) + packages = Packages(payload.parcels) + + request = CarrierPickupScheduleRequest( + USERID=settings.username, + PASSWORD=settings.password, + FirstName=payload.address.person_name, + LastName=None, + FirmName=payload.address.company_name, + SuiteOrApt=payload.address.address_line1, + Address2=SF.concat_str( + payload.address.address_line1, payload.address.address_line2, join=True ), - ) - - # map data to convert karrio model to usps specific type - request = usps.PickupRequestType( - pickupDate=lib.fdate(payload.pickup_date), - pickupAddress=usps.PickupAddressType( - firstName=address.person_name, - lastName=None, - firm=address.company_name, - address=usps.AddressType( - streetAddress=address.address_line1, - secondaryAddress=address.address_line2, - city=address.city, - state=address.state, - ZIPCode=lib.to_zip5(address.postal_code), - ZIPPlus4=lib.to_zip4(address.postal_code) or "", - urbanization=None, - ), - contact=[ - usps.ContactType(email=address.email) - for _ in [address.email] - if _ is not None - ], - ), - packages=[ - usps.PackageType( - packageType=options.usps_package_type.state or "OTHER", - packageCount=len(packages), - ) + Urbanization=None, + City=payload.address.city, + State=payload.address.state_code, + ZIP5=payload.address.postal_code, + ZIP4=None, + 
Phone=payload.address.phone_number, + Extension=None, + Package=[ + PackageType(ServiceType=shipment.service, Count=len(shipment.parcels)) + for shipment in shipments ], - estimatedWeight=packages.weight.LB, - pickupLocation=lib.identity( - usps.PickupLocationType( - packageLocation=payload.package_location, - specialInstructions=payload.instruction, - ) - if any([payload.package_location, payload.instruction]) - else None - ), + EstimatedWeight=packages.weight.LB, + PackageLocation=payload.package_location, + SpecialInstructions=payload.instruction, + EmailAddress=payload.address.email, ) - return lib.Serializable(request, lib.to_dict) + return Serializable(request) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/update.py b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/update.py index bf14d0e0d7..1ebb5c0b45 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/pickup/update.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/pickup/update.py @@ -1,109 +1,63 @@ -"""Karrio USPS update pickup implementation.""" - -import karrio.schemas.usps_international.pickup_update_request as usps -import karrio.schemas.usps_international.pickup_update_response as pickup - -import typing +from typing import Tuple, List +from karrio.schemas.usps.carrier_pickup_change_request import ( + CarrierPickupChangeRequest, + PackageType, +) +from karrio.core.units import Packages +from karrio.core.utils import Serializable, SF +from karrio.core.models import ( + ShipmentRequest, + PickupUpdateRequest, + PickupDetails, + Message, +) + +from karrio.providers.usps_international.error import parse_error_response +from karrio.providers.usps_international.utils import Settings import karrio.lib as lib -import karrio.core.units as units -import karrio.core.models as models -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils -import karrio.providers.usps_international.units as provider_units def parse_pickup_update_response( - _response: lib.Deserializable[dict], - settings: provider_utils.Settings, -) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - response = _response.deserialize() - - messages = error.parse_error_response(response, settings) - pickup = ( - _extract_details(response, settings) - if "confirmationNumber" in response - else None - ) - - return pickup, messages - + response: lib.Deserializable[dict], settings: Settings +) -> Tuple[PickupDetails, List[Message]]: + errors = parse_error_response(response.deserialize(), settings) + details = None -def _extract_details( - data: dict, - settings: provider_utils.Settings, -) -> models.PickupDetails: - details = lib.to_object(pickup.PickupUpdateResponseType, data) - - return models.PickupDetails( - carrier_id=settings.carrier_id, - carrier_name=settings.carrier_name, - confirmation_number=details.confirmationNumber, - pickup_date=lib.fdate(details.pickupDate), - ) + return details, errors def pickup_update_request( - payload: models.PickupUpdateRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - address = lib.to_address(payload.address) - packages = lib.to_packages(payload.parcels) - options = lib.units.Options( - payload.options, - option_type=lib.units.create_enum( - "PickupOptions", - # fmt: off - { - "usps_package_type": lib.OptionEnum("usps_package_type"), - }, - # fmt: on - ), - ) - - # map 
data to convert karrio model to usps specific type - request = usps.PickupUpdateRequestType( - pickupDate=lib.fdate(payload.pickup_date), - carrierPickupRequest=usps.CarrierPickupRequestType( - pickupDate=lib.fdate(payload.pickup_date), - pickupAddress=usps.PickupAddressType( - firstName=address.person_name, - lastName=None, - firm=address.company_name, - address=usps.AddressType( - streetAddress=address.address_line1, - secondaryAddress=address.address_line2, - city=address.city, - state=address.state, - ZIPCode=lib.to_zip5(address.postal_code), - ZIPPlus4=lib.to_zip4(address.postal_code) or "", - urbanization=None, - ), - contact=[ - usps.ContactType(email=address.email) - for _ in [address.email] - if _ is not None - ], - ), - packages=[ - usps.PackageType( - packageType=options.usps_package_type.state or "OTHER", - packageCount=len(packages), - ) - ], - estimatedWeight=packages.weight.LB, - pickupLocation=lib.identity( - usps.PickupLocationType( - packageLocation=payload.package_location, - specialInstructions=payload.instruction, - ) - if any([payload.package_location, payload.instruction]) - else None - ), + payload: PickupUpdateRequest, settings: Settings +) -> Serializable: + shipments: List[ShipmentRequest] = payload.options.get("shipments", []) + packages = Packages(payload.parcels) + + request = CarrierPickupChangeRequest( + USERID=settings.username, + PASSWORD=settings.password, + FirstName=payload.address.person_name, + LastName=None, + FirmName=payload.address.company_name, + SuiteOrApt=payload.address.address_line1, + Address2=SF.concat_str( + payload.address.address_line1, payload.address.address_line2, join=True ), + Urbanization=None, + City=payload.address.city, + State=payload.address.state_code, + ZIP5=payload.address.postal_code, + ZIP4=None, + Phone=payload.address.phone_number, + Extension=None, + Package=[ + PackageType(ServiceType=shipment.service, Count=len(shipment.parcels)) + for shipment in shipments + ], + EstimatedWeight=packages.weight.LB, + PackageLocation=payload.package_location, + SpecialInstructions=payload.instruction, + ConfirmationNumber=payload.confirmation_number, + EmailAddress=payload.address.email, ) - return lib.Serializable( - request, - lib.to_dict, - dict(confirmationNumber=payload.confirmation_number), - ) + return Serializable(request) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/rate.py b/modules/connectors/usps_international/karrio/providers/usps_international/rate.py index a341089edc..f03c18dcd3 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/rate.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/rate.py @@ -1,67 +1,67 @@ -"""Karrio USPS rating API implementation.""" +from datetime import datetime, timezone +from karrio.schemas.usps.intl_rate_v2_request import ( + IntlRateV2Request, + PackageType, + ExtraServicesType, + GXGType, +) +from karrio.schemas.usps.intl_rate_v2_response import ServiceType -import karrio.schemas.usps_international.rate_request as usps -import karrio.schemas.usps_international.rate_response as rating - -import time import typing import karrio.lib as lib import karrio.core.units as units import karrio.core.models as models import karrio.core.errors as errors -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils +import karrio.providers.usps_international.error as provider_error import karrio.providers.usps_international.units as 
provider_units +import karrio.providers.usps_international.utils as provider_utils def parse_rate_response( - _response: lib.Deserializable[dict], - settings: provider_utils.Settings, + _response: lib.Deserializable[lib.Element], settings: provider_utils.Settings ) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - responses = _response.deserialize() - - messages = error.parse_error_response(responses, settings) - rates = lib.to_multi_piece_rates( - [ - ( - f"{_}", - [_extract_details(rate, settings) for rate in response["rateOptions"]], - ) - for _, response in enumerate(responses, start=1) - if response.get("rateOptions") is not None - ] - ) - - return rates, messages + response = _response.deserialize() + quotes: typing.List[models.RateDetails] = [ + _extract_details(package, settings) + for package in lib.find_element("Service", response) + ] + return quotes, provider_error.parse_error_response(response, settings) def _extract_details( - data: dict, - settings: provider_utils.Settings, + postage_node: lib.Element, settings: provider_utils.Settings ) -> models.RateDetails: - rate = lib.to_object(rating.RateOptionType, data) - mail_class = rate.rates[0].mailClass - service = provider_units.ShippingService.map(mail_class) + postage: ServiceType = lib.to_object(ServiceType, postage_node) + service = provider_units.ServiceClassID.map(str(postage.ID)) + delivery_date = lib.to_date(postage.GuaranteeAvailability, "%m/%d/%Y") + transit = ( + (delivery_date.date() - datetime.now().date()).days + if delivery_date is not None + else None + ) + charges = [ - ("Base Charge", lib.to_money(rate.totalBasePrice)), - *[(_.description, lib.to_money(_.price)) for _ in rate.rates], - *[(_.name, lib.to_money(_.price)) for _ in rate.extraServices], + ("Base charge", postage.Postage), + *((s.ServiceName, s.Price) for s in postage.ExtraServices.ExtraService), ] return models.RateDetails( - carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, service=service.name_or_key, - total_charge=lib.to_money(rate.totalPrice), - currency="USD", + total_charge=lib.to_decimal(postage.Postage), + currency=units.Currency.USD.name, + transit_days=transit, extra_charges=[ - models.ChargeDetails(name=name, currency="USD", amount=amount) + models.ChargeDetails( + name=name, + amount=lib.to_decimal(amount), + currency=units.Currency.USD.name, + ) for name, amount in charges + if amount ], - meta=dict( - service_name=service.name or mail_class, - zone=lib.failsafe(lambda: rate.rates[0].zone), - ), + meta=dict(service_name=service.name or postage.SvcDescription), ) @@ -69,54 +69,91 @@ def rate_request( payload: models.RateRequest, settings: provider_utils.Settings, ) -> lib.Serializable: - shipper = lib.to_address(payload.shipper) - recipient = lib.to_address(payload.recipient) + """Create the appropriate USPS International rate request depending on the destination + + :param payload: Karrio unified API rate request data + :param settings: USPS International connection and auth settings + :return: an international USPS compatible rate request + :raises: + - OriginNotServicedError when the origin country is not serviced by the carrier + - DestinationNotServicedError when the destination country is US + """ - if shipper.country_code != units.Country.US.name: - raise errors.OriginNotServicedError(shipper.country_code) + if ( + payload.shipper.country_code is not None + and payload.shipper.country_code != units.Country.US.name + ): + raise 
errors.OriginNotServicedError(payload.shipper.country_code) - if recipient.country_code == units.Country.US.name: - raise errors.DestinationNotServicedError(recipient.country_code) + if payload.recipient.country_code == units.Country.US.name: + raise errors.DestinationNotServicedError(payload.recipient.country_code) + recipient = lib.to_address(payload.recipient) services = lib.to_services(payload.services, provider_units.ShippingService) + package = lib.to_packages( + payload.parcels, + package_option_type=provider_units.ShippingOption, + max_weight=units.Weight(70, units.WeightUnit.LB), + ).single options = lib.to_shipping_options( payload.options, + package_options=package.options, initializer=provider_units.shipping_options_initializer, ) - packages = lib.to_packages( - payload.parcels, - options=options, - package_option_type=provider_units.ShippingOption, - shipping_options_initializer=provider_units.shipping_options_initializer, - ) - # map data to convert karrio model to usps specific type - request = [ - usps.RateRequestType( - originZIPCode=shipper.postal_code, - foreignPostalCode=recipient.postal_code, - destinationCountryCode=recipient.country_code, - weight=package.weight.LB, - length=package.length.IN, - width=package.width.IN, - height=package.height.IN, - mailClass=getattr( - services.first, "value", provider_units.ShippingService.usps_all.value - ), - priceType=package.options.usps_price_type.state or "RETAIL", - mailingDate=lib.fdate( - package.options.shipment_date.state or time.strftime("%Y-%m-%d") - ), - accountType=settings.account_type or "EPS", - accountNumber=settings.account_number, - itemValue=package.items.value_amount, - extraServices=[ - lib.to_int(_.code) - for __, _ in options.items() - if __ not in provider_units.CUSTOM_OPTIONS - ], + commercial = next(("Y" for svc in services if "commercial" in svc.name), "N") + commercial_plus = next(("Y" for svc in services if "plus" in svc.name), "N") + acceptance_date = ( + datetime.isoformat( + (options.shipment_date.state or datetime.now(timezone.utc)), ) - for package in packages - ] + if recipient.postal_code + else None + ) + + request = IntlRateV2Request( + USERID=settings.username, + PASSWORD=settings.password, + Revision="2", + Package=[ + PackageType( + ID=0, + Pounds=0, + Ounces=package.weight.OZ, + Machinable=options.usps_option_machinable_item.state or False, + MailType=provider_units.PackagingType[ + package.packaging_type or "package" + ].value, + GXG=( + GXGType(POBoxFlag="N", GiftFlag="N") + if any( + "global_express_guaranteed" in s.name for s in payload.services + ) + else None + ), + ValueOfContents=(options.declared_value.state or ""), + Country=recipient.country_name, + Width=package.width.IN, + Length=package.length.IN, + Height=package.height.IN, + Girth=( + package.girth.value if package.packaging_type == "tube" else None + ), + OriginZip=payload.shipper.postal_code, + CommercialFlag=commercial, + CommercialPlusFlag=commercial_plus, + AcceptanceDateTime=acceptance_date, + DestinationPostalCode=recipient.postal_code, + ExtraServices=( + ExtraServicesType( + ExtraService=[option.code for _, option in options.items()] + ) + if any(options.items()) + else None + ), + Content=None, + ) + ], + ) - return lib.Serializable(request, lib.to_dict) + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/__init__.py b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/__init__.py index 
33e9b88106..0f3d1a6023 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/__init__.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/__init__.py @@ -1,8 +1,2 @@ -from karrio.providers.usps_international.shipment.create import ( - parse_shipment_response, - shipment_request, -) -from karrio.providers.usps_international.shipment.cancel import ( - parse_shipment_cancel_response, - shipment_cancel_request, -) +from karrio.providers.usps_international.shipment.create import parse_shipment_response, shipment_request +from karrio.providers.usps_international.shipment.cancel import parse_shipment_cancel_response, shipment_cancel_request diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/cancel.py b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/cancel.py index a009bba197..cc57cba492 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/cancel.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/cancel.py @@ -1,53 +1,53 @@ -import typing +from typing import Tuple, List +from karrio.schemas.usps.evsi_cancel_request import eVSICancelRequest +from karrio.schemas.usps.evsi_cancel_response import eVSICancelResponse +from karrio.core.utils import Serializable, XP +from karrio.core.models import ShipmentCancelRequest, ConfirmationDetails, Message + +from karrio.providers.usps_international.error import parse_error_response +from karrio.providers.usps_international.utils import Settings import karrio.lib as lib -import karrio.core.models as models -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils -import karrio.providers.usps_international.units as provider_units def parse_shipment_cancel_response( - _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], - settings: provider_utils.Settings, -) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: - responses = _response.deserialize() - messages: typing.List[models.Message] = sum( - [ - error.parse_error_response(response, settings, tracking_number=_) - for _, response in responses - ], - start=[], - ) - success = all([_["ok"] for __, _ in responses]) + _response: lib.Deserializable[lib.Element], + settings: Settings, +) -> Tuple[ConfirmationDetails, List[Message]]: + response = _response.deserialize() + errors: List[Message] = parse_error_response(response, settings) + cancel_response = XP.to_object(eVSICancelResponse, response) + + if cancel_response.Status != "Cancelled": + errors.append( + Message( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + message=cancel_response.Reason, + code=cancel_response.Status, + ) + ) - confirmation = ( - models.ConfirmationDetails( + details = ( + ConfirmationDetails( carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, - operation="Cancel Shipment", - success=success, + operation="Shipment Cancel", + success=True, ) - if success + if not any(errors) else None ) - return confirmation, messages + return details, errors def shipment_cancel_request( - payload: models.ShipmentCancelRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - - # map data to convert karrio model to usps specific type - request = [ - dict(trackingNumber=_) - for _ in set( - [ - payload.shipment_identifier, - *((payload.options or 
{}).get("shipment_identifiers") or []), - ] - ) - ] + payload: ShipmentCancelRequest, settings: Settings +) -> Serializable: + request = eVSICancelRequest( + USERID=settings.username, + PASSWORD=settings.password, + BarcodeNumber=payload.shipment_identifier, + ) - return lib.Serializable(request, lib.to_dict) + return Serializable(request, XP.export) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/create.py b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/create.py index 784ba32ef1..fdb867b8b3 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/create.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/create.py @@ -1,243 +1,60 @@ -"""Karrio USPS create label implementation.""" - -import karrio.schemas.usps_international.label_request as usps -import karrio.schemas.usps_international.label_response as shipping - -import time -import typing +from typing import Tuple, List + +from karrio.core.errors import OriginNotServicedError, DestinationNotServicedError +from karrio.core.units import Country +from karrio.core.utils import Serializable, Element +from karrio.core.models import ShipmentRequest, ShipmentDetails, Message + +import karrio.providers.usps_international.shipment.priority_mail as priority_mail +import karrio.providers.usps_international.shipment.first_class_mail as first_class_mail +import karrio.providers.usps_international.shipment.priority_express as priority_express +import karrio.providers.usps_international.shipment.global_express_guaranteed as global_express_guaranteed +from karrio.providers.usps_international.units import ServiceType +from karrio.providers.usps_international.utils import Settings import karrio.lib as lib -import karrio.core.units as units -import karrio.core.models as models -import karrio.core.errors as errors -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils -import karrio.providers.usps_international.units as provider_units def parse_shipment_response( - _response: lib.Deserializable[typing.List[dict]], - settings: provider_utils.Settings, -) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: - responses = _response.deserialize() - - shipment = lib.to_multi_piece_shipment( - [ - ( - f"{_}", - _extract_details(response, settings, _response.ctx), - ) - for _, response in enumerate(responses, start=1) - if response.get("error") is None - ] - ) - messages: typing.List[models.Message] = sum( - [error.parse_error_response(response, settings) for response in responses], - start=[], - ) + _response: lib.Deserializable[Element], settings: Settings +) -> Tuple[ShipmentDetails, List[Message]]: + response = _response.deserialize() + if response.tag == "eVSFirstClassMailIntlResponse": + return first_class_mail.parse_shipment_response(response, settings) - return shipment, messages + if response.tag == "eVSGXGGetLabelResponse": + return global_express_guaranteed.parse_shipment_response(response, settings) + if response.tag == "eVSPriorityMailIntlResponse": + return priority_mail.parse_shipment_response(response, settings) -def _extract_details( - data: dict, - settings: provider_utils.Settings, - ctx: dict = None, -) -> models.ShipmentDetails: - details = lib.to_object(shipping.LabelResponseType, data) - label = details.labelImage - label_type = ctx.get("label_type", "PDF") + else: + return 
priority_express.parse_shipment_response(response, settings) - return models.ShipmentDetails( - carrier_id=settings.carrier_id, - carrier_name=settings.carrier_name, - tracking_number=details.labelMetadata.internationalTrackingNumber, - shipment_identifier=details.labelMetadata.internationalTrackingNumber, - label_type=label_type, - docs=models.Documents(label=label), - meta=dict( - SKU=details.labelMetadata.SKU, - postage=details.labelMetadata.postage, - ), - ) +def shipment_request(payload: ShipmentRequest, settings: Settings) -> Serializable: + if ( + payload.shipper.country_code is not None + and payload.shipper.country_code != Country.US.name + ): + raise OriginNotServicedError(payload.shipper.country_code) -def shipment_request( - payload: models.ShipmentRequest, - settings: provider_utils.Settings, -) -> lib.Serializable: - shipper = lib.to_address(payload.shipper) - recipient = lib.to_address(payload.recipient) + if payload.recipient.country_code == Country.US.name: + raise DestinationNotServicedError(payload.recipient.country_code) - if shipper.country_code != units.Country.US.name: - raise errors.OriginNotServicedError(shipper.country_code) + service = ServiceType[payload.service] - if recipient.country_code == units.Country.US.name: - raise errors.DestinationNotServicedError(recipient.country_code) + # Create a First Class Mail Shipment Request + if service == ServiceType.usps_first_class_mail_international: + return first_class_mail.shipment_request(payload, settings) - service = provider_units.ShippingService.map(payload.service).value_or_key - options = lib.to_shipping_options( - payload.options, - initializer=provider_units.shipping_options_initializer, - ) - packages = lib.to_packages( - payload.parcels, - options=options, - package_option_type=provider_units.ShippingOption, - shipping_options_initializer=provider_units.shipping_options_initializer, - ) - customs = lib.to_customs_info( - payload.customs, - shipper=payload.shipper, - recipient=payload.recipient, - weight_unit=units.WeightUnit.LB.name, - ) - pickup_location = lib.to_address(options.hold_for_pickup_address.state) - label_type = provider_units.LabelType.map(payload.label_type).value or "PDF" + # Create a GXG Shipment Request + elif service == ServiceType.usps_global_express_guaranteed: + return global_express_guaranteed.shipment_request(payload, settings) - # map data to convert karrio model to usps specific type - request = [ - usps.LabelRequestType( - imageInfo=usps.ImageInfoType( - imageType=label_type, - labelType="4X6LABEL", - ), - toAddress=usps.AddressType( - streetAddress=recipient.address_line1, - secondaryAddress=recipient.address_line2, - city=recipient.city, - state=recipient.state, - ZIPCode=lib.to_zip5(recipient.postal_code) or "", - ZIPPlus4=lib.to_zip4(recipient.postal_code) or "", - urbanization=None, - firstName=recipient.person_name, - lastName=None, - firm=recipient.company_name, - phone=recipient.phone_number, - email=recipient.email, - ignoreBadAddress=True, - platformUserId=None, - ), - fromAddress=usps.AddressType( - streetAddress=shipper.address_line1, - secondaryAddress=shipper.address_line2, - city=shipper.city, - state=shipper.state, - ZIPCode=lib.to_zip4(shipper.postal_code) or "", - ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", - urbanization=None, - firstName=shipper.person_name, - lastName=None, - firm=shipper.company_name, - phone=shipper.phone_number, - email=shipper.email, - ignoreBadAddress=True, - platformUserId=None, - ), - senderAddress=usps.AddressType( - 
streetAddress=shipper.address_line1, - secondaryAddress=shipper.address_line2, - city=shipper.city, - state=shipper.state, - ZIPCode=lib.to_zip4(shipper.postal_code) or "", - ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", - urbanization=None, - firstName=shipper.person_name, - lastName=None, - firm=shipper.company_name, - phone=shipper.phone_number, - email=shipper.email, - ignoreBadAddress=True, - platformUserId=None, - ), - packageDescription=usps.PackageDescriptionType( - weightUOM="lb", - weight=package.weight.LB, - dimensionsUOM="in", - length=package.length.IN, - height=package.height.IN, - width=package.width.IN, - girth=package.girth, - mailClass=service, - rateIndicator=package.options.usps_rate_indicator.state or "SP", - processingCategory=lib.identity( - package.options.usps_processing_category.state or "NON_MACHINABLE" - ), - destinationEntryFacilityType=lib.identity( - package.options.usps_destination_facility_type.state or "NONE" - ), - destinationEntryFacilityAddress=lib.identity( - usps.DestinationEntryFacilityAddressType( - streetAddress=pickup_location.address_line1, - secondaryAddress=pickup_location.address_line2, - city=pickup_location.city, - state=pickup_location.state, - ZIPCode=lib.to_zip4(pickup_location.postal_code) or "", - ZIPPlus4=lib.to_zip5(pickup_location.postal_code) or "", - urbanization=None, - ) - if package.options.hold_for_pickup_address.state is not None - else None - ), - packageOptions=lib.identity( - usps.PackageOptionsType( - packageValue=package.total_value, - nonDeliveryOption=None, - redirectAddress=None, - generateGXEvent=None, - originalPackage=None, - ) - if (package.total_value or 0.0) > 0.0 - else None - ), - customerReference=[ - usps.CustomerReferenceType( - referenceNumber=reference, - ) - for reference in [payload.reference] - if reference is not None - ], - extraServices=[ - lib.to_int(_.code) - for __, _ in package.options.items() - if __ not in provider_units.CUSTOM_OPTIONS - ], - mailingDate=lib.fdate( - package.options.shipment_date.state or time.strftime("%Y-%m-%d") - ), - ), - customsForm=usps.CustomsFormType( - contentComments=customs.content_description, - restrictionType=package.options.usps_restriction_type.state, - restrictionComments=package.options.restrictionComments.state, - AESITN=customs.options.aes.state, - invoiceNumber=customs.invoice, - licenseNumber=customs.options.license_number.state, - certificateNumber=customs.options.certificate_number.state, - customsContentType=lib.identity( - provider_units.CustomsContentType.map(customs.content_type).value - or "OTHER" - ), - importersReference=None, - exportersReference=None, - contents=[ - usps.ContentType( - itemDescription=item.description, - itemQuantity=item.quantity, - itemValue=item.value_amount, - itemTotalValue=item.value_amount * item.quantity, - weightUOM="lb", - itemWeight=item.weight, - itemTotalWeight=item.weight * item.quantity, - HSTariffNumber=item.hs_code, - countryofOrigin=item.origin_country, - itemCategory=None, - itemSubcategory=None, - ) - for item in customs.commodities - ], - ), - ) - for package in packages - ] + # Create a Priority Mail Shipment Request + elif service == ServiceType.usps_priority_mail_international: + return priority_mail.shipment_request(payload, settings) - return lib.Serializable(request, lib.to_dict, dict(label_type=label_type)) + # Fallback to creating a Priority Express Mail Shipment Request + else: + return priority_express.shipment_request(payload, settings) diff --git 
a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/first_class_mail.py b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/first_class_mail.py new file mode 100644 index 0000000000..371831774b --- /dev/null +++ b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/first_class_mail.py @@ -0,0 +1,177 @@ +from karrio.schemas.usps.evs_first_class_mail_intl_response import ( + eVSFirstClassMailIntlResponse, +) +from karrio.schemas.usps.evs_first_class_mail_intl_request import ( + eVSFirstClassMailIntlRequest, + ImageParametersType, + ShippingContentsType, + ItemDetailType, + ExtraServicesType, +) + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_international.error as provider_error +import karrio.providers.usps_international.units as provider_units +import karrio.providers.usps_international.utils as provider_utils + + +def parse_shipment_response( + response: lib.Element, + settings: provider_utils.Settings, +) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + errors = provider_error.parse_error_response(response, settings) + details = ( + _extract_details(response, settings) + if len(lib.find_element("BarcodeNumber", response)) > 0 + else None + ) + + return details, errors + + +def _extract_details( + response: lib.Element, + settings: provider_utils.Settings, +) -> models.ShipmentDetails: + shipment = lib.to_object(eVSFirstClassMailIntlResponse, response) + + return models.ShipmentDetails( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + tracking_number=shipment.BarcodeNumber, + shipment_identifier=shipment.BarcodeNumber, + docs=models.Documents(label=shipment.LabelImage), + meta=dict( + carrier_tracking_link=settings.tracking_url.format(shipment.BarcodeNumber), + ), + ) + + +def shipment_request( + payload: models.ShipmentRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + package = lib.to_packages( + payload.parcels, + max_weight=units.Weight(70, units.WeightUnit.LB), + ).single + options = lib.to_shipping_options( + payload.options, + package_options=package.options, + initializer=provider_units.shipping_options_initializer, + ) + + label_format = provider_units.LabelFormat[ + payload.label_type or "usps_6_x_4_label" + ].value + customs = lib.to_customs_info(payload.customs or models.Customs(commodities=[])) + + request = eVSFirstClassMailIntlRequest( + USERID=settings.username, + PASSWORD=settings.password, + Option=None, + Revision=2, + ImageParameters=ImageParametersType(ImageParameter=label_format), + FromFirstName=customs.signer or shipper.person_name, + FromLastName=shipper.person_name, + FromFirm=shipper.company_name or "N/A", + FromAddress1=shipper.address_line2 or "", + FromAddress2=shipper.street, + FromUrbanization=None, + FromCity=shipper.city, + FromZip5=lib.to_zip5(shipper.postal_code), + FromZip4=lib.to_zip4(shipper.postal_code) or "", + FromPhone=shipper.phone_number, + ToName=None, + ToFirstName=recipient.person_name, + ToLastName=recipient.person_name, + ToFirm=recipient.company_name or "N/A", + ToAddress1=recipient.address_line2 or "", + ToAddress2=recipient.street, + ToAddress3=None, + ToCity=recipient.city, + ToProvince=lib.to_state_name( + recipient.state_code, country=recipient.country_code + ), + 
ToCountry=lib.to_country_name(recipient.country_code), + ToPostalCode=recipient.postal_code, + ToPOBoxFlag=None, + ToPhone=recipient.phone_number, + ToFax=None, + ToEmail=recipient.email, + FirstClassMailType=None, + ShippingContents=ShippingContentsType( + ItemDetail=[ + ItemDetailType( + Description=lib.text(item.description or item.title or "N/A"), + Quantity=item.quantity, + Value=item.value_amount, + NetPounds=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).LB, + NetOunces=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).OZ, + HSTariffNumber=item.hs_code or item.sku, + CountryOfOrigin=lib.to_country_name(item.origin_country), + ) + for item in payload.customs.commodities + ] + ), + Postage=None, + GrossPounds=package.weight.LB, + GrossOunces=package.weight.OZ, + ContentType=provider_units.ContentType[customs.content_type or "other"].value, + ContentTypeOther=customs.content_description or "N/A", + Agreement=("N" if customs.certify else "Y"), + Comments=customs.content_description, + LicenseNumber=customs.options.license_number.state, + CertificateNumber=customs.options.certificate_number.state, + InvoiceNumber=customs.invoice, + ImageType="PDF", + ImageLayout="ALLINONEFILE", + CustomerRefNo=None, + CustomerRefNo2=None, + POZipCode=None, + LabelDate=lib.fdatetime( + (options.shipment_date.state or time.strftime("%Y-%m-%d")), + current_format="%Y-%m-%d", + output_format="%m/%d/%Y", + ), + HoldForManifest=None, + EELPFC=customs.options.eel_pfc.state, + Container=None, + Length=package.length.IN, + Width=package.width.IN, + Height=package.height.IN, + Girth=(package.girth.value if package.packaging_type == "tube" else None), + ExtraServices=( + ExtraServicesType( + ExtraService=[option.code for _, option in options.items()] + ) + if any(options.items()) + else None + ), + PriceOptions=None, + ActionCode=None, + OptOutOfSPE=None, + PermitNumber=None, + AccountZipCode=None, + Machinable=(options.usps_option_machinable_item.state or False), + DestinationRateIndicator="I", + MID=settings.mailer_id, + LogisticsManagerMID=settings.logistics_manager_mailer_id, + CRID=settings.customer_registration_id, + VendorCode=None, + VendorProductVersionNumber=None, + RemainingBarcodes=None, + ChargebackCode=None, + ) + + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/global_express_guaranteed.py b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/global_express_guaranteed.py new file mode 100644 index 0000000000..a73db0a1c6 --- /dev/null +++ b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/global_express_guaranteed.py @@ -0,0 +1,186 @@ +from karrio.schemas.usps.evs_gxg_get_label_response import eVSGXGGetLabelResponse +from karrio.schemas.usps.evs_gxg_get_label_request import ( + eVSGXGGetLabelRequest, + ShippingContentsType, + ItemDetailType, +) + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.core.errors as errors +import karrio.providers.usps_international.error as provider_error +import karrio.providers.usps_international.units as provider_units +import karrio.providers.usps_international.utils as provider_utils + + +def parse_shipment_response( + response: lib.Element, settings: provider_utils.Settings +) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + errors = 
provider_error.parse_error_response(response, settings) + details = ( + _extract_details(response, settings) + if ( + len(lib.find_element("USPSBarcodeNumber", response)) > 0 + or len(lib.find_element("FedExBarcodeNumber", response)) > 0 + ) + else None + ) + + return details, errors + + +def _extract_details( + response: lib.Element, settings: provider_utils.Settings +) -> models.ShipmentDetails: + shipment = lib.to_object(eVSGXGGetLabelResponse, response) + tracking_number = shipment.USPSBarcodeNumber or shipment.FedExBarcodeNumber + + return models.ShipmentDetails( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + tracking_number=tracking_number, + shipment_identifier=tracking_number, + docs=models.Documents(label=shipment.LabelImage), + meta=dict( + carrier_tracking_link=settings.tracking_url.format(tracking_number), + ), + ) + + +def shipment_request( + payload: models.ShipmentRequest, settings: provider_utils.Settings +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + package = lib.to_packages( + payload.parcels, + package_option_type=provider_units.ShippingOption, + max_weight=units.Weight(70, units.WeightUnit.LB), + ).single + options = lib.to_shipping_options( + payload.options, + package_options=package.options, + initializer=provider_units.shipping_options_initializer, + ) + + customs = lib.to_customs_info(payload.customs or models.Customs(commodities=[])) + incoterm = provider_units.Incoterm[customs.incoterm or "OTHER"].value + + request = eVSGXGGetLabelRequest( + USERID=settings.username, + PASSWORD=settings.password, + Option=None, + Revision=2, + ImageParameters=None, + FromFirstName=(customs.signer or shipper.person_name or "N/A"), + FromMiddleInitial=None, + FromLastName=shipper.person_name, + FromFirm=shipper.company_name or "N/A", + FromAddress1=shipper.street, + FromAddress2=shipper.address_line2 or "", + FromUrbanization=None, + FromCity=shipper.city, + FromState=lib.to_state_name(shipper.state_code, country="US"), + FromZIP5=lib.to_zip5(shipper.postal_code), + FromZIP4=lib.to_zip4(shipper.postal_code), + FromPhone=shipper.phone_number, + ShipFromZIP=None, + ToFirstName=None, + ToLastName=recipient.person_name, + ToFirm=recipient.company_name or "N/A", + ToAddress1=recipient.street, + ToAddress2=recipient.address_line2 or "", + ToAddress3=None, + ToPostalCode=recipient.postal_code, + ToPhone=recipient.phone_number, + RecipientEMail=recipient.email, + ToDPID="000", # supposedly required; TODO: test and find a proper solution + ToProvince=recipient.state_code, + ToTaxID=(recipient.federal_tax_id or recipient.state_tax_id), + Container=provider_units.PackagingType[ + package.packaging_type or "package" + ].value, + ContentType=("DOCUMENTS" if package.parcel.is_document else "NON-DOC"), + ShippingContents=ShippingContentsType( + ItemDetail=[ + ItemDetailType( + Description=lib.text(item.description or item.title or "N/A"), + Commodity=lib.text(item.title or item.description or "N/A", max=35), + Quantity=item.quantity, + UnitValue=item.value_amount, + NetPounds=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).LB, + NetOunces=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).OZ, + UnitOfMeasure=None, + HSTariffNumber=item.hs_code or item.sku, + CountryofManufacture=lib.to_country_name(item.origin_country), + ) + for item in customs.commodities + ] + ), + PurposeOfShipment=provider_units.ContentType[ + customs.content_type or "other" + ].value, + 
PartiesToTransaction=None, + Agreement=("N" if customs.certify else "Y"), + Postage=None, + InsuredValue=provider_units.ShippingOption.insurance_from( + options, "global_express" + ), + GrossPounds=package.weight.LB, + GrossOunces=package.weight.OZ, + Length=package.length.IN, + Width=package.width.IN, + Height=package.height.IN, + Girth=(package.girth.value if package.packaging_type == "tube" else None), + Shape=None, + CIRequired=customs.commercial_invoice or None, + InvoiceDate=lib.fdatetime(customs.invoice_date, output_format="%m/%d/%Y"), + InvoiceNumber=customs.invoice, + CustomerOrderNumber=None, + CustOrderNumber=None, + TermsDelivery=incoterm, + TermsDeliveryOther=( + (customs.incoterm or incoterm) if incoterm == "OTHER" else None + ), + PackingCost=None, + CountryUltDest=lib.to_country_name(recipient.country_code), + CIAgreement=customs.commercial_invoice or None, + ImageType="PDF", + ImageLayout=None, + CustomerRefNo=None, + CustomerRefNo2=None, + ShipDate=lib.fdatetime( + (options.shipment_date.state or time.strftime("%Y-%m-%d")), + current_format="%Y-%m-%d", + output_format="%m/%d/%Y", + ), + HoldForManifest=None, + PriceOptions=None, + CommercialShipment=customs.commercial_invoice or None, + BuyerRecipient=( + customs.commercial_invoice or None + ), # Consider recipient as buyer for commercial shipment + TermsPayment=("Net 50" if customs.commercial_invoice else None), + ActionCode=None, + OptOutOfSPE=None, + PermitNumber=None, + AccountZipCode=None, + Machinable=(options.usps_option_machinable_item.state or False), + DestinationRateIndicator="I", + MID=settings.mailer_id, + LogisticsManagerMID=settings.logistics_manager_mailer_id, + CRID=settings.customer_registration_id, + VendorCode=None, + VendorProductVersionNumber=None, + OverrideMID=None, + ChargebackCode=None, + ) + + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_express.py b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_express.py new file mode 100644 index 0000000000..ce760ac7ed --- /dev/null +++ b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_express.py @@ -0,0 +1,191 @@ +from karrio.schemas.usps.evs_express_mail_intl_response import ( + eVSExpressMailIntlResponse, +) +from karrio.schemas.usps.evs_express_mail_intl_request import ( + eVSExpressMailIntlRequest, + ImageParametersType, + ShippingContentsType, + ItemDetailType, +) + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.core.errors as errors +import karrio.providers.usps_international.error as provider_error +import karrio.providers.usps_international.units as provider_units +import karrio.providers.usps_international.utils as provider_utils + + +def parse_shipment_response( + response: lib.Element, settings: provider_utils.Settings +) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + errors = provider_error.parse_error_response(response, settings) + details = ( + _extract_details(response, settings) + if len(lib.find_element("BarcodeNumber", response)) > 0 + else None + ) + + return details, errors + + +def _extract_details( + response: lib.Element, + settings: provider_utils.Settings, +) -> models.ShipmentDetails: + shipment = lib.to_object(eVSExpressMailIntlResponse, response) + + return models.ShipmentDetails( + carrier_name=settings.carrier_name, + 
carrier_id=settings.carrier_id, + tracking_number=shipment.BarcodeNumber, + shipment_identifier=shipment.BarcodeNumber, + docs=models.Documents(label=shipment.LabelImage), + meta=dict( + carrier_tracking_link=settings.tracking_url.format(shipment.BarcodeNumber), + ), + ) + + +def shipment_request( + payload: models.ShipmentRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + package = lib.to_packages( + payload.parcels, + package_option_type=provider_units.ShippingOption, + max_weight=units.Weight(70, units.WeightUnit.LB), + ).single + options = lib.to_shipping_options( + payload.options, + package_options=package.options, + initializer=provider_units.shipping_options_initializer, + ) + + label_format = provider_units.LabelFormat[ + payload.label_type or "usps_6_x_4_label" + ].value + customs = lib.to_customs_info(payload.customs or models.Customs(commodities=[])) + redirect_address = lib.to_address( + models.Address(**(options.usps_option_redirect_non_delivery.state or {})) + ) + + request = eVSExpressMailIntlRequest( + USERID=settings.username, + PASSWORD=settings.password, + Option=None, + Revision=2, + ImageParameters=ImageParametersType(ImageParameter=label_format), + FromFirstName=customs.signer or shipper.person_name or "N/A", + FromLastName=shipper.person_name, + FromFirm=shipper.company_name or "N/A", + FromAddress1=shipper.address_line2 or "", + FromAddress2=shipper.address_line1, + FromUrbanization=None, + FromCity=shipper.city, + FromZip5=lib.to_zip5(shipper.postal_code), + FromZip4=lib.to_zip4(shipper.postal_code) or "", + FromPhone=shipper.phone_number, + FromCustomsReference=None, + ToName=None, + ToFirstName=recipient.person_name, + ToLastName=recipient.person_name, + ToFirm=recipient.company_name or "N/A", + ToAddress1=recipient.address_line2 or "", + ToAddress2=recipient.address_line1, + ToAddress3=None, + ToCity=recipient.city, + ToProvince=lib.to_state_name( + recipient.state_code, country=recipient.country_code + ), + ToCountry=lib.to_country_name(recipient.country_code), + ToPostalCode=recipient.postal_code, + ToPOBoxFlag=None, + ToPhone=recipient.phone_number, + ToFax=None, + ToEmail=recipient.email, + ImportersReferenceNumber=None, + NonDeliveryOption=provider_units.ShippingOption.non_delivery_from(options), + RedirectName=redirect_address.person_name, + RedirectEmail=redirect_address.email, + RedirectSMS=redirect_address.phone_number, + RedirectAddress=redirect_address.address_line, + RedirectCity=redirect_address.city, + RedirectState=redirect_address.state_code, + RedirectZipCode=redirect_address.postal_code, + RedirectZip4=lib.to_zip4(redirect_address.postal_code) or "", + Container=None, + ShippingContents=ShippingContentsType( + ItemDetail=[ + ItemDetailType( + Description=lib.text(item.description or item.title or "N/A"), + Quantity=item.quantity, + Value=item.value_amount, + NetPounds=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).LB, + NetOunces=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).OZ, + HSTariffNumber=item.hs_code or item.sku, + CountryOfOrigin=lib.to_country_name(item.origin_country), + ) + for item in customs.commodities + ] + ), + InsuredAmount=provider_units.ShippingOption.insurance_from( + options, "express_mail" + ), + GrossPounds=package.weight.LB, + GrossOunces=package.weight.OZ, + ContentType=provider_units.ContentType[customs.content_type or "other"].value, + 
ContentTypeOther=customs.content_description or "N/A", + Agreement=("Y" if customs.certify else "N"), + Comments=customs.content_description, + LicenseNumber=customs.options.license_number.state, + CertificateNumber=customs.options.certificate_number.state, + InvoiceNumber=customs.invoice, + ImageType="PDF", + ImageLayout="ALLINONEFILE", + InsuredNumber=None, + CustomerRefNo=None, + CustomerRefNo2=None, + POZipCode=None, + LabelDate=lib.fdatetime( + (options.shipment_date.state or time.strftime("%Y-%m-%d")), + current_format="%Y-%m-%d", + output_format="%m/%d/%Y", + ), + EMCAAccount=None, + HoldForManifest=None, + EELPFC=customs.options.eel_pfc.state, + PriceOptions=None, + Length=package.length.IN, + Width=package.width.IN, + Height=package.height.IN, + Girth=(package.girth.value if package.packaging_type == "tube" else None), + LabelTime=None, + MeterPaymentFlag=None, + ActionCode=None, + OptOutOfSPE=None, + PermitNumber=None, + AccountZipCode=None, + ImportersReferenceType=None, + ImportersTelephoneNumber=None, + ImportersFaxNumber=None, + ImportersEmail=None, + Machinable=options.usps_option_machinable_item.state or False, + DestinationRateIndicator="I", + MID=settings.mailer_id, + LogisticsManagerMID=settings.logistics_manager_mailer_id, + CRID=settings.customer_registration_id, + VendorCode=None, + VendorProductVersionNumber=None, + ) + + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_mail.py b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_mail.py new file mode 100644 index 0000000000..6772d15862 --- /dev/null +++ b/modules/connectors/usps_international/karrio/providers/usps_international/shipment/priority_mail.py @@ -0,0 +1,200 @@ +from karrio.schemas.usps.evs_priority_mail_intl_response import ( + eVSPriorityMailIntlResponse, +) +from karrio.schemas.usps.evs_priority_mail_intl_request import ( + eVSPriorityMailIntlRequest, + ImageParametersType, + ShippingContentsType, + ItemDetailType, + ExtraServicesType, +) + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.core.errors as errors +import karrio.providers.usps_international.error as provider_error +import karrio.providers.usps_international.units as provider_units +import karrio.providers.usps_international.utils as provider_utils + + +def parse_shipment_response( + response: lib.Element, + settings: provider_utils.Settings, +) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + errors = provider_error.parse_error_response(response, settings) + details = ( + _extract_details(response, settings) + if len(lib.find_element("BarcodeNumber", response)) > 0 + else None + ) + + return details, errors + + +def _extract_details( + response: lib.Element, + settings: provider_utils.Settings, +) -> models.ShipmentDetails: + shipment = lib.to_object(eVSPriorityMailIntlResponse, response) + + return models.ShipmentDetails( + carrier_name=settings.carrier_name, + carrier_id=settings.carrier_id, + tracking_number=shipment.BarcodeNumber, + shipment_identifier=shipment.BarcodeNumber, + docs=models.Documents(label=shipment.LabelImage), + meta=dict( + carrier_tracking_link=settings.tracking_url.format(shipment.BarcodeNumber), + ), + ) + + +def shipment_request( + payload: models.ShipmentRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = 
lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + package = lib.to_packages( + payload.parcels, + package_option_type=provider_units.ShippingOption, + max_weight=units.Weight(70, units.WeightUnit.LB), + ).single + options = lib.to_shipping_options( + payload.options, + package_options=package.options, + initializer=provider_units.shipping_options_initializer, + ) + + label_format = provider_units.LabelFormat[ + payload.label_type or "usps_6_x_4_label" + ].value + insurance = provider_units.ShippingOption.insurance_from(options, "priority_mail") + customs = lib.to_customs_info(payload.customs or models.Customs(commodities=[])) + redirect_address = lib.to_address( + models.Address(**(options.usps_option_redirect_non_delivery.state or {})) + ) + + request = eVSPriorityMailIntlRequest( + USERID=settings.username, + PASSWORD=settings.password, + Option=None, + Revision=2, + ImageParameters=ImageParametersType(ImageParameter=label_format), + FromFirstName=customs.signer or shipper.person_name or "N/A", + FromMiddleInitial=None, + FromLastName=shipper.person_name, + FromFirm=shipper.company_name or "N/A", + FromAddress1=shipper.address_line2 or "", + FromAddress2=shipper.street, + FromUrbanization=None, + FromCity=shipper.city, + FromState=lib.to_state_name(shipper.state_code, country="US"), + FromZip5=lib.to_zip5(shipper.postal_code), + FromZip4=lib.to_zip4(shipper.postal_code) or "", + FromPhone=shipper.phone_number, + FromCustomsReference=None, + ToName=None, + ToFirstName=recipient.person_name, + ToLastName=None, + ToFirm=recipient.company_name or "N/A", + ToAddress1=recipient.address_line2 or "", + ToAddress2=recipient.street, + ToAddress3=None, + ToCity=recipient.city, + ToProvince=lib.to_state_name( + recipient.state_code, country=recipient.country_code + ), + ToCountry=lib.to_country_name(recipient.country_code), + ToPostalCode=recipient.postal_code, + ToPOBoxFlag=None, + ToPhone=recipient.phone_number, + ToFax=None, + ToEmail=recipient.email, + ImportersReferenceNumber=None, + NonDeliveryOption=provider_units.ShippingOption.non_delivery_from(options), + RedirectName=redirect_address.person_name, + RedirectEmail=redirect_address.email, + RedirectSMS=redirect_address.phone_number, + RedirectAddress=redirect_address.address_line, + RedirectCity=redirect_address.city, + RedirectState=redirect_address.state_code, + RedirectZipCode=redirect_address.postal_code, + RedirectZip4=lib.to_zip4(redirect_address.postal_code) or "", + Container=None, + ShippingContents=ShippingContentsType( + ItemDetail=[ + ItemDetailType( + Description=lib.text(item.description or item.title or "N/A"), + Quantity=item.quantity, + Value=item.value_amount, + NetPounds=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).LB, + NetOunces=units.Weight( + item.weight, units.WeightUnit[item.weight_unit or "LB"] + ).OZ, + HSTariffNumber=item.hs_code or item.sku, + CountryOfOrigin=lib.to_country_name(item.origin_country), + ) + for item in customs.commodities + ] + ), + Insured=("N" if insurance is None else "Y"), + InsuredAmount=insurance, + GrossPounds=package.weight.LB, + GrossOunces=package.weight.OZ, + ContentType=provider_units.ContentType[customs.content_type or "other"].value, + ContentTypeOther=customs.content_description or "N/A", + Agreement=("N" if customs.certify else "Y"), + Comments=customs.content_description, + LicenseNumber=customs.options.license_number.state, + CertificateNumber=customs.options.certificate_number.state, + InvoiceNumber=customs.invoice, 
+ ImageType="PDF", + ImageLayout="ALLINONEFILE", + CustomerRefNo=None, + CustomerRefNo2=None, + POZipCode=None, + LabelDate=lib.fdatetime( + (options.shipment_date.state or time.strftime("%Y-%m-%d")), + current_format="%Y-%m-%d", + output_format="%m/%d/%Y", + ), + EMCAAccount=None, + HoldForManifest=None, + EELPFC=customs.options.eel_pfc.state, + PriceOptions=None, + Length=package.length.IN, + Width=package.width.IN, + Height=package.height.IN, + Girth=(package.girth.value if package.packaging_type == "tube" else None), + ExtraServices=( + ExtraServicesType( + ExtraService=[option.code for _, option in options.items()] + ) + if any(options.items()) + else None + ), + ActionCode=None, + OptOutOfSPE=None, + PermitNumber=None, + AccountZipCode=None, + ImportersReferenceType=None, + ImportersTelephoneNumber=None, + ImportersFaxNumber=None, + ImportersEmail=None, + Machinable=(options.usps_option_machinable_item.state or False), + DestinationRateIndicator="I", + MID=settings.mailer_id, + LogisticsManagerMID=settings.logistics_manager_mailer_id, + CRID=settings.customer_registration_id, + VendorCode=None, + VendorProductVersionNumber=None, + ChargebackCode=None, + ) + + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/tracking.py b/modules/connectors/usps_international/karrio/providers/usps_international/tracking.py index 5e276c6ee3..b526ce8356 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/tracking.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/tracking.py @@ -1,89 +1,82 @@ -"""Karrio USPS rating API implementation.""" - -# import karrio.schemas.usps_international.tracking_request as usps -import karrio.schemas.usps_international.tracking_response as tracking - +import karrio.schemas.usps.track_field_request as usps +import karrio.schemas.usps.track_response as tracking import typing import karrio.lib as lib -import karrio.core.units as units import karrio.core.models as models -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils -import karrio.providers.usps_international.units as provider_units +import karrio.providers.usps.error as error +import karrio.providers.usps.utils as provider_utils +import karrio.providers.usps.units as provider_units def parse_tracking_response( - _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], + _response: lib.Deserializable[lib.Element], settings: provider_utils.Settings, ) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: - responses = _response.deserialize() - - messages: typing.List[models.Message] = sum( - [ - error.parse_error_response(response, settings, tracking_number=_) - for _, response in responses - ], - start=[], - ) - tracking_details = [ - _extract_details(details, settings) - for _, details in responses - if "error" not in details + response = _response.deserialize() + tracks_info = lib.find_element("TrackInfo", response) + details = [ + _extract_details(node, settings) + for node in tracks_info + if len(lib.find_element("TrackDetail", node)) > 0 ] - return tracking_details, messages + return details, error.parse_error_response(response, settings) def _extract_details( - data: dict, + node: lib.Element, settings: provider_utils.Settings, ) -> models.TrackingDetails: - details = lib.to_object(tracking.TrackingResponseType, data) + info = 
lib.to_object(tracking.TrackInfoType, node) + events: typing.List[tracking.TrackDetailType] = [ + *([info.TrackSummary] or []), + *info.TrackDetail, + ] + delivered = info.StatusCategory.lower() == "delivered" + expected_delivery = lib.fdate( + info.ExpectedDeliveryDate or info.PredictedDeliveryDate, + "%B %d, %Y", + ) status = next( ( status.name for status in list(provider_units.TrackingStatus) - if getattr(details, "status", None) in status.value + if str(getattr(events[0], "EventCode", None)) in status.value ), provider_units.TrackingStatus.in_transit.name, ) return models.TrackingDetails( - carrier_id=settings.carrier_id, carrier_name=settings.carrier_name, - tracking_number=details.trackingNumber, + carrier_id=settings.carrier_id, + tracking_number=info.ID, + estimated_delivery=expected_delivery, + delivered=delivered, + status=status, events=[ models.TrackingEvent( - date=lib.fdate(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), - description=event.name, - code=event.eventType, - time=lib.flocaltime(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), - location=lib.text( - event.eventCity, - event.eventZIP, - event.eventState, - event.eventCountry, + code=str(event.EventCode), + date=lib.fdate(event.EventDate, "%B %d, %Y"), + time=lib.flocaltime(event.EventTime, "%H:%M %p"), + description=event.Event, + location=lib.join( + event.EventCity, + event.EventState, + event.EventCountry, + str(event.EventZIPCode or ""), + join=True, separator=", ", ), ) - for event in details.trackingEvents + for event in events ], - estimated_delivery=lib.fdate( - details.expectedDeliveryTimeStamp, - "%Y-%m-%dT%H:%M:%SZ", - ), - delivered=status == "delivered", - status=status, info=models.TrackingInfo( - # fmt: off - carrier_tracking_link=settings.tracking_url.format(details.trackingNumber), - expected_delivery=lib.fdate(details.expectedDeliveryTimeStamp, "%Y-%m-%dT%H:%M:%SZ"), - shipment_service=provider_units.ShippingService.map(details.serviceTypeCode).name_or_key, - shipment_origin_country=details.originCountry, - shipment_origin_postal_code=details.originZIP, - shipment_destination_country=details.destinationCountryCode, - shipment_destination_postal_code=details.destinationZIP, - # fmt: on + carrier_tracking_link=settings.tracking_url.format(info.ID), + shipment_destination_postal_code=info.DestinationZip, + shipment_destination_country=info.DestinationCountryCode, + shipment_origin_country=info.OriginCountryCode, + shipment_origin_postal_code=info.OriginZip, + shipment_service=info.Class, ), ) @@ -92,8 +85,20 @@ def tracking_request( payload: models.TrackingRequest, settings: provider_utils.Settings, ) -> lib.Serializable: + request = usps.TrackFieldRequest( + USERID=settings.username, + PASSWORD=settings.password, + Revision="1", + ClientIp="127.0.0.1", + SourceId="Karrio", + TrackID=[ + usps.TrackIDType( + ID=tracking_number, + DestinationZipCode=None, + MailingDate=None, + ) + for tracking_number in payload.tracking_numbers + ], + ) - # map data to convert karrio model to usps specific type - request = payload.tracking_numbers - - return lib.Serializable(request, lib.to_dict) + return lib.Serializable(request, lib.to_xml) diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/units.py b/modules/connectors/usps_international/karrio/providers/usps_international/units.py index 44cb851c4e..e5183a080e 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/units.py +++ 
b/modules/connectors/usps_international/karrio/providers/usps_international/units.py @@ -1,204 +1,281 @@ -import karrio.lib as lib -import karrio.core.units as units +"""Karrio USPS International units module""" +import typing +from karrio.core import units +from karrio.core.utils import Enum +from karrio.core.models import Address +from karrio.core.utils.enum import OptionEnum -class PackagingType(lib.StrEnum): - """Carrier specific packaging type""" - PACKAGE = "PACKAGE" +class Incoterm(Enum): + CPT = "CPT" + CIP = "CIP" + DAF = "DAF" + DDU = "DDU" + OTHER = "OTHER" - """ Unified Packaging type mapping """ - envelope = PACKAGE - pak = PACKAGE - tube = PACKAGE - pallet = PACKAGE - small_box = PACKAGE - medium_box = PACKAGE - your_packaging = PACKAGE + """ Unified Incoterm type mapping """ + CFR = OTHER + CIF = OTHER + DDP = OTHER + DEQ = OTHER + DES = OTHER + EXW = OTHER + FAS = OTHER + FCA = OTHER + FOB = OTHER -class CustomsContentType(lib.StrEnum): +class ContentType(Enum): + cremated_remains = "CREMATEDREMAINS" merchandise = "MERCHANDISE" + sample = "SAMPLE" gift = "GIFT" - document = "DOCUMENT" - commercial_sample = "COMMERCIAL_SAMPLE" - returned_goods = "RETURNED_GOODS" + documents = "DOCUMENTS" + return_merchandise = "RETURN" + humanitarian = "HUMANITARIAN" + dangerousgoods = "DANGEROUSGOODS" + nonnegotiabledocument = "NONNEGOTIABLEDOCUMENT" + pharmacuticals = "PHARMACUTICALS" + medicalsupplies = "MEDICALSUPPLIES" other = "OTHER" - humanitarian_donations = "HUMANITARIAN_DONATIONS" - dangerous_goods = "DANGEROUS_GOODS" - cremated_remains = "CREMATED_REMAINS" - non_negotiable_document = "NON_NEGOTIABLE_DOCUMENT" - medical_supplies = "MEDICAL_SUPPLIES" - pharmaceuticals = "PHARMACEUTICALS" - """ Unified Content type mapping """ - documents = document - sample = commercial_sample - return_merchandise = returned_goods +class LabelFormat(Enum): + usps_barcode_only = "BARCODE ONLY" + usps_crop = "CROP" + usps_4_x_6_label = "4X6LABEL" + usps_4_x_6_label_l = "4X6LABELL" + usps_6_x_4_label = "6X4LABEL" + usps_4_x_6_label_p = "4X6LABELP" + usps_4_x_6_label_p_page = "4X6LABELP PAGE" + usps_4_x_6_zpl_203_dpi = "4X6ZPL203DPI" + usps_4_x_6_zpl_300_dpi = "4X6ZPL300DPI" + usps_separate_continue_page = "SEPARATECONTINUEPAGE" + """ Unified Label type mapping """ + PDF = usps_6_x_4_label + ZPL = usps_4_x_6_zpl_203_dpi -class LabelType(lib.StrEnum): - """Carrier specific label type""" - PDF = "PDF" - TIFF = "TIFF" - JPG = "JPG" - SVG = "SVG" - ZPL203DPI = "ZPL203DPI" - ZPL300DPI = "ZPL300DPI" - LABEL_BROKER = "LABEL_BROKER" - NONE = "NONE" +class PackagingType(Enum): + all = "ALL" + package = "PACKAGE" + postcards = "POSTCARDS" + envelope = "ENVELOPE" + letter = "LETTER" + large_envelope = "LARGEENVELOPE" + flat_rate = "FLATRATE" + variable = "VARIABLE" + legal_envelope = "LEGALENVELOPE" + usps_gxg_envelope = "USPSGXGENVELOPE" + usps_gxg_legal_envelope = "USPSGXGLEGALENVELOPE" + usps_gxg_tyvek_envelope = "USPSGXGTYVEKENVELOPE" + + """ Unified Packaging type mapping """ + pak = large_envelope + tube = package + pallet = package + small_box = package + medium_box = package + your_packaging = package - """ Unified Label type mapping """ - ZPL = ZPL300DPI - PNG = JPG - - -class ShippingService(lib.StrEnum): - """Carrier specific services""" - - usps_standard_service = "USPS Standard Service" - usps_parcel_select = "PARCEL_SELECT" - usps_parcel_select_lightweight = "PARCEL_SELECT_LIGHTWEIGHT" - usps_priority_mail_express = "PRIORITY_MAIL_EXPRESS" - usps_priority_mail = "PRIORITY_MAIL" - 
usps_first_class_package_service = "FIRST-CLASS_PACKAGE_SERVICE" - usps_library_mail = "LIBRARY_MAIL" - usps_media_mail = "MEDIA_MAIL" - usps_bound_printed_matter = "BOUND_PRINTED_MATTER" - usps_connect_local = "USPS_CONNECT_LOCAL" - usps_connect_mail = "USPS_CONNECT_MAIL" - usps_connect_next_day = "USPS_CONNECT_NEXT_DAY" - usps_connect_regional = "USPS_CONNECT_REGIONAL" - usps_connect_same_day = "USPS_CONNECT_SAME_DAY" - usps_ground_advantage = "USPS_GROUND_ADVANTAGE" - usps_retail_ground = "USPS_RETAIL_GROUND" - usps_all = "ALL" - - -class ShippingOption(lib.Enum): - """Carrier specific options""" - - # fmt: off - usps_label_delivery_service = lib.OptionEnum("415", bool) - usps_tracking_plus_6_months = lib.OptionEnum("480", bool) - usps_tracking_plus_1_year = lib.OptionEnum("481", bool) - usps_tracking_plus_3_years = lib.OptionEnum("482", bool) - usps_tracking_plus_5_years = lib.OptionEnum("483", bool) - usps_tracking_plus_7_years = lib.OptionEnum("484", bool) - usps_tracking_plus_10_years = lib.OptionEnum("485", bool) - usps_tracking_plus_signature_3_years = lib.OptionEnum("486", bool) - usps_tracking_plus_signature_5_years = lib.OptionEnum("487", bool) - usps_tracking_plus_signature_7_years = lib.OptionEnum("488", bool) - usps_tracking_plus_signature_10_years = lib.OptionEnum("489", bool) - usps_hazardous_materials_air_eligible_ethanol = lib.OptionEnum("810", bool) - usps_hazardous_materials_class_1_toy_propellant_safety_fuse_package = lib.OptionEnum("811", bool) - usps_hazardous_materials_class_3_flammable_and_combustible_liquids = lib.OptionEnum("812", bool) - usps_hazardous_materials_class_7_radioactive_materials = lib.OptionEnum("813", bool) - usps_hazardous_materials_class_8_air_eligible_corrosive_materials = lib.OptionEnum("814", bool) - usps_hazardous_materials_class_8_nonspillable_wet_batteries = lib.OptionEnum("815", bool) - usps_hazardous_materials_class_9_lithium_battery_marked_ground_only = lib.OptionEnum("816", bool) - usps_hazardous_materials_class_9_lithium_battery_returns = lib.OptionEnum("817", bool) - usps_hazardous_materials_class_9_marked_lithium_batteries = lib.OptionEnum("818", bool) - usps_hazardous_materials_class_9_dry_ice = lib.OptionEnum("819", bool) - usps_hazardous_materials_class_9_unmarked_lithium_batteries = lib.OptionEnum("820", bool) - usps_hazardous_materials_class_9_magnetized_materials = lib.OptionEnum("821", bool) - usps_hazardous_materials_division_4_1_mailable_flammable_solids_and_safety_matches = lib.OptionEnum("822", bool) - usps_hazardous_materials_division_5_1_oxidizers = lib.OptionEnum("823", bool) - usps_hazardous_materials_division_5_2_organic_peroxides = lib.OptionEnum("824", bool) - usps_hazardous_materials_division_6_1_toxic_materials = lib.OptionEnum("825", bool) - usps_hazardous_materials_division_6_2_biological_materials = lib.OptionEnum("826", bool) - usps_hazardous_materials_excepted_quantity_provision = lib.OptionEnum("827", bool) - usps_hazardous_materials_ground_only_hazardous_materials = lib.OptionEnum("828", bool) - usps_hazardous_materials_air_eligible_id8000_consumer_commodity = lib.OptionEnum("829", bool) - usps_hazardous_materials_lighters = lib.OptionEnum("830", bool) - usps_hazardous_materials_limited_quantity_ground = lib.OptionEnum("831", bool) - usps_hazardous_materials_small_quantity_provision_markings_required = lib.OptionEnum("832", bool) - usps_hazardous_materials = lib.OptionEnum("857", bool) - usps_certified_mail = lib.OptionEnum("910", bool) - usps_certified_mail_restricted_delivery = lib.OptionEnum("911", 
bool) - usps_certified_mail_adult_signature_required = lib.OptionEnum("912", bool) - usps_certified_mail_adult_signature_restricted_delivery = lib.OptionEnum("913", bool) - usps_collect_on_delivery = lib.OptionEnum("915", float) - usps_collect_on_delivery_restricted_delivery = lib.OptionEnum("917", bool) - usps_tracking_electronic = lib.OptionEnum("920", bool) - usps_signature_confirmation = lib.OptionEnum("921", bool) - usps_adult_signature_required = lib.OptionEnum("922", bool) - usps_adult_signature_restricted_delivery = lib.OptionEnum("923", bool) - usps_signature_confirmation_restricted_delivery = lib.OptionEnum("924", bool) - usps_priority_mail_express_merchandise_insurance = lib.OptionEnum("925", bool) - usps_insurance_bellow_500 = lib.OptionEnum("930", float) - usps_insurance_above_500 = lib.OptionEnum("931", float) - usps_insurance_restricted_delivery = lib.OptionEnum("934", bool) - usps_registered_mail = lib.OptionEnum("940", bool) - usps_registered_mail_restricted_delivery = lib.OptionEnum("941", bool) - usps_return_receipt = lib.OptionEnum("955", bool) - usps_return_receipt_electronic = lib.OptionEnum("957", bool) - usps_signature_requested_priority_mail_express_only = lib.OptionEnum("981", bool) - usps_parcel_locker_delivery = lib.OptionEnum("984", bool) - usps_po_to_addressee_priority_mail_express_only = lib.OptionEnum("986", bool) - usps_sunday_delivery = lib.OptionEnum("981", bool) - # fmt: on - - """ Custom Options """ - usps_price_type = lib.OptionEnum("priceType") - usps_facility_id = lib.OptionEnum("facilityId") - usps_hold_for_pickup = lib.OptionEnum("holdForPickup", bool) - usps_rate_indicator = lib.OptionEnum("rateIndicator") - usps_processing_category = lib.OptionEnum("processingCategory") - usps_carrier_release = lib.OptionEnum("carrierRelease", bool) - usps_physical_signature_required = lib.OptionEnum("physicalSignatureRequired", bool) - usps_restriction_type = lib.OptionEnum("restrictionType") - - """ Unified Option type mapping """ - cash_on_delivery = usps_collect_on_delivery - signature_confirmation = usps_signature_confirmation - sunday_delivery = usps_sunday_delivery - hold_at_location = usps_hold_for_pickup - - -CUSTOM_OPTIONS = [ - ShippingOption.usps_price_type.name, - ShippingOption.usps_facility_id.name, - ShippingOption.usps_hold_for_pickup.name, - ShippingOption.usps_rate_indicator.name, - ShippingOption.usps_processing_category.name, - ShippingOption.usps_carrier_release.name, - ShippingOption.usps_physical_signature_required.name, -] + +class ShippingOption(Enum): + usps_registered_mail = OptionEnum("103") + usps_insurance_global_express_guaranteed = OptionEnum("106", float) + usps_insurance_express_mail_international = OptionEnum("107", float) + usps_insurance_priority_mail_international = OptionEnum("108", float) + usps_return_receipt = OptionEnum("105") + usps_certificate_of_mailing = OptionEnum("100") + usps_electronic_usps_delivery_confirmation_international = OptionEnum("109") + + """ Non official options """ + usps_option_machinable_item = OptionEnum("usps_option_machinable_item", bool) + usps_option_abandon_non_delivery = OptionEnum("ABANDON") + usps_option_return_non_delivery = OptionEnum("RETURN") + usps_option_redirect_non_delivery = OptionEnum("REDIRECT", Address) + + @classmethod + def insurance_from( + cls, options: units.Options, service_key: str + ) -> typing.Optional[float]: + return next( + ( + value.state + for key, value in options + if "usps_insurance" in key and service_key in key + ), + options.insurance.state, + ) + + 
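A self-contained sketch of the lookup order ShippingOption.insurance_from implements above: prefer a service-specific usps_insurance_* option, otherwise fall back to the generic insurance amount. Plain tuples stand in for karrio's options objects and the amounts are made up.

    # Illustration only: mirrors the next(...) fallback used by insurance_from.
    def pick_insurance(option_items, service_key, generic_insurance):
        return next(
            (
                amount
                for key, amount in option_items
                if "usps_insurance" in key and service_key in key
            ),
            generic_insurance,
        )

    items = [("usps_insurance_priority_mail_international", 75.0)]
    assert pick_insurance(items, "priority_mail_international", 50.0) == 75.0  # service-specific option wins
    assert pick_insurance([], "priority_mail_international", 50.0) == 50.0     # generic fallback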
@classmethod + def non_delivery_from(cls, options: units.Options) -> typing.Optional[str]: + # Gets the first provided non delivery option or default to "RETURN" + return next( + (value.state for name, value in options if "non_delivery" in name), + "RETURN", + ) def shipping_options_initializer( options: dict, - package_options: units.ShippingOptions = None, -) -> units.ShippingOptions: + package_options: units.Options = None, +) -> units.Options: """ Apply default values to the given options. """ + _options = options.copy() if package_options is not None: - options.update(package_options.content) - - if "insurance" in options: - if lib.to_money(options["insurance"]) > 500: - options[ShippingOption.usps_insurance_above_500.name] = options["insurance"] - else: - options[ShippingOption.usps_insurance_bellow_500.name] = options[ - "insurance" - ] - - def items_filter(key: str) -> bool: - return key in ShippingOption # type: ignore - - return units.ShippingOptions(options, ShippingOption, items_filter=items_filter) - - -class TrackingStatus(lib.Enum): - on_hold = ["on_hold"] - delivered = ["delivered"] - in_transit = ["in_transit"] - delivery_failed = ["delivery_failed"] - delivery_delayed = ["delivery_delayed"] - out_for_delivery = ["out_for_delivery"] - ready_for_pickup = ["ready_for_pickup"] + _options.update(package_options.content) + + def items_filter(code: str) -> bool: + return code in ShippingOption and "usps_option" not in code # type: ignore + + return units.ShippingOptions(_options, ShippingOption, items_filter=items_filter) + + +class ShippingService(Enum): + usps_first_class = "First Class" + usps_first_class_commercial = "First Class Commercial" + usps_first_class_hfp_commercial = "First Class HFPCommercial" + usps_priority = "Priority" + usps_priority_commercial = "Priority Commercial" + usps_priority_cpp = "Priority Cpp" + usps_priority_hfp_commercial = "Priority HFP Commercial" + usps_priority_hfp_cpp = "Priority HFP CPP" + usps_priority_mail_express = "Priority Mail Express" + usps_priority_mail_express_commercial = "Priority Mail Express Commercial" + usps_priority_mail_express_cpp = "Priority Mail Express CPP" + usps_priority_mail_express_sh = "Priority Mail Express Sh" + usps_priority_mail_express_sh_commercial = "Priority Mail Express ShCommercial" + usps_priority_mail_express_hfp = "Priority Mail Express HFP" + usps_priority_mail_express_hfp_commercial = "Priority Mail Express HFP Commercial" + usps_priority_mail_express_hfp_cpp = "Priority Mail Express HFP CPP" + usps_priority_mail_cubic = "Priority Mail Cubic" + usps_retail_ground = "Retail Ground" + usps_media = "Media" + usps_library = "Library" + usps_all = "All" + usps_online = "Online" + usps_plus = "Plus" + usps_bpm = "BPM" + + +class ServiceClassID(Enum): + usps_priority_mail_express_international = "1" + usps_priority_mail_international = "2" + usps_global_express_guaranteed_gxg = "4" + usps_global_express_guaranteed_document = "5" + usps_global_express_guaranteed_non_document_rectangular = "6" + usps_global_express_guaranteed_non_document_non_rectangular = "7" + usps_priority_mail_international_flat_rate_envelope = "8" + usps_priority_mail_international_medium_flat_rate_box = "9" + usps_priority_mail_express_international_flat_rate_envelope = "10" + usps_priority_mail_international_large_flat_rate_box = "11" + usps_global_express_guaranteed_envelopes = "12" + usps_first_class_mail_international_letter = "13" + usps_first_class_mail_international_large_envelope = "14" + 
usps_first_class_package_international_service = "15" + usps_priority_mail_international_small_flat_rate_box = "16" + usps_priority_mail_express_international_legal_flat_rate_envelope = "17" + usps_priority_mail_international_gift_card_flat_rate_envelope = "18" + usps_priority_mail_international_window_flat_rate_envelope = "19" + usps_priority_mail_international_small_flat_rate_envelope = "20" + usps_first_class_mail_international_postcard = "21" + usps_priority_mail_international_legal_flat_rate_envelope = "22" + usps_priority_mail_international_padded_flat_rate_envelope = "23" + usps_priority_mail_international_dvd_flat_rate_priced_box = "24" + usps_priority_mail_international_large_video_flat_rate_priced_box = "25" + usps_priority_mail_express_international_padded_flat_rate_envelope = "27" + + +class ServiceType(Enum): + usps_global_express_guaranteed = "GXG" + usps_first_class_mail_international = "First-Class Mail International" + usps_priority_mail_international = "Priority Mail International" + usps_priority_mail_express_international = "Priority Mail Express International" + + """ ShipmentService type correspondence """ + usps_global_express_guaranteed_gxg = usps_global_express_guaranteed + usps_global_express_guaranteed_document = usps_global_express_guaranteed + usps_global_express_guaranteed_envelopes = usps_global_express_guaranteed + usps_global_express_guaranteed_non_document_rectangular = ( + usps_global_express_guaranteed + ) + usps_global_express_guaranteed_non_document_non_rectangular = ( + usps_global_express_guaranteed + ) + usps_priority_mail_international_flat_rate_envelope = ( + usps_priority_mail_international + ) + usps_priority_mail_international_medium_flat_rate_box = ( + usps_priority_mail_international + ) + usps_priority_mail_express_international_flat_rate_envelope = ( + usps_priority_mail_express_international + ) + usps_priority_mail_international_large_flat_rate_box = ( + usps_priority_mail_international + ) + usps_first_class_mail_international_letter = usps_first_class_mail_international + usps_first_class_mail_international_large_envelope = ( + usps_first_class_mail_international + ) + usps_first_class_package_international_service = usps_first_class_mail_international + usps_priority_mail_international_small_flat_rate_box = ( + usps_priority_mail_international + ) + usps_priority_mail_express_international_legal_flat_rate_envelope = ( + usps_priority_mail_express_international + ) + usps_priority_mail_international_gift_card_flat_rate_envelope = ( + usps_priority_mail_international + ) + usps_priority_mail_international_window_flat_rate_envelope = ( + usps_priority_mail_international + ) + usps_priority_mail_international_small_flat_rate_envelope = ( + usps_priority_mail_international + ) + usps_first_class_mail_international_postcard = usps_first_class_mail_international + usps_priority_mail_international_legal_flat_rate_envelope = ( + usps_priority_mail_international + ) + usps_priority_mail_international_padded_flat_rate_envelope = ( + usps_priority_mail_international + ) + usps_priority_mail_international_dvd_flat_rate_priced_box = ( + usps_priority_mail_international + ) + usps_priority_mail_international_large_video_flat_rate_priced_box = ( + usps_priority_mail_international + ) + usps_priority_mail_express_international_padded_flat_rate_envelope = ( + usps_priority_mail_express_international + ) + + +class TrackingStatus(Enum): + in_transit = [""] + delivered = ["1"] + ready_for_pickup = ["16"] + delivery_failed = [ + "4", + "5", + "9", + "11", 
+ "31", + "44", + "21", + "22", + "23", + "24", + "25", + "26", + "27", + "28", + "29", + ] + out_for_delivery = ["7"] diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/utils.py b/modules/connectors/usps_international/karrio/providers/usps_international/utils.py index d6859e7acd..3daf440a9f 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/utils.py +++ b/modules/connectors/usps_international/karrio/providers/usps_international/utils.py @@ -1,17 +1,22 @@ -import datetime -import karrio.lib as lib -import karrio.core as core -import karrio.core.errors as errors +"""Karrio USPS International client settings.""" +from karrio.core.settings import Settings as BaseSettings -class Settings(core.Settings): - """USPS connection settings.""" - # Add carrier specific api connection properties here - client_id: str - client_secret: str - account_type: str = None - account_number: str = None +class Settings(BaseSettings): + """USPS International connection settings.""" + + # Carrier specific properties + username: str + password: str + mailer_id: str = None + customer_registration_id: str = None + logistics_manager_mailer_id: str = None + + id: str = None + account_country_code: str = "US" + metadata: dict = {} + config: dict = {} @property def carrier_name(self): @@ -19,69 +24,8 @@ def carrier_name(self): @property def server_url(self): - return "https://api.usps.com" + return "https://secure.shippingapis.com/ShippingAPI.dll" @property def tracking_url(self): return "https://tools.usps.com/go/TrackConfirmAction?tLabels={}" - - @property - def connection_config(self) -> lib.units.Options: - return lib.to_connection_config( - self.config or {}, - option_type=ConnectionConfig, - ) - - @property - def access_token(self): - """Retrieve the access_token using the client_id|client_secret pair - or collect it from the cache if an unexpired access_token exist. 
- """ - cache_key = f"{self.carrier_name}|{self.client_id}|{self.client_secret}" - now = datetime.datetime.now() + datetime.timedelta(minutes=30) - - auth = self.connection_cache.get(cache_key) or {} - token = auth.get("access_token") - expiry = lib.to_date(auth.get("expiry"), current_format="%Y-%m-%d %H:%M:%S") - - if token is not None and expiry is not None and expiry > now: - return token - - self.connection_cache.set(cache_key, lambda: login(self)) - new_auth = self.connection_cache.get(cache_key) - - return new_auth["access_token"] - - -def login(settings: Settings, client_id: str = None, client_secret: str = None): - import karrio.providers.usps_international.error as error - - result = lib.request( - url=f"{settings.server_url}/oauth2/v3/token", - method="POST", - headers={"content-Type": "application/x-www-form-urlencoded"}, - data=lib.to_query_string( - dict( - grant_type="client_credentials", - client_id=client_id, - client_secret=client_secret, - ) - ), - ) - - response = lib.to_dict(result) - messages = error.parse_error_response(response, settings) - - if any(messages): - raise errors.ShippingSDKError(messages) - - expiry = datetime.datetime.now() + datetime.timedelta( - seconds=float(response.get("expires_in", 0)) - ) - - return {**response, "expiry": lib.fdatetime(expiry)} - - -class ConnectionConfig(lib.Enum): - shipping_options = lib.OptionEnum("shipping_options", list) - shipping_services = lib.OptionEnum("shipping_services", list) diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_request.py new file mode 100644 index 0000000000..f177e82812 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_request.py @@ -0,0 +1,1563 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:01 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/address_validate_request.py') +# +# Command line arguments: +# ./schemas/AddressValidateRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/address_validate_request.py" ./schemas/AddressValidateRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. 
+# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + 
return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + 
return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class AddressValidateRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, Revision=None, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressValidateRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressValidateRequest.subclass: + return AddressValidateRequest.subclass(*args_, **kwargs_) + else: + return AddressValidateRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def has__content(self): + if ( + self.Revision is not None or + self.Address is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressValidateRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressValidateRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressValidateRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressValidateRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressValidateRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + self.Address.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address = obj_ + obj_.original_tagname_ = 'Address' +# end class AddressValidateRequest + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, FirmName=None, Address1=None, Address2=None, City=None, State=None, Urbanization=None, Zip5=None, Zip4=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = 
getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_Zip4(self): + return self.Zip4 + def set_Zip4(self, Zip4): + self.Zip4 = Zip4 + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.FirmName is not None or + self.Address1 is not None or + self.Address2 is not None or + self.City is not None or + self.State is not None or + self.Urbanization is not None or + self.Zip5 is not None or + self.Zip4 is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), 
namespaceprefix_ , eol_)) + if self.Address1 is not None: + namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip5), input_name='Zip5')), namespaceprefix_ , eol_)) + if self.Zip4 is not None: + namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip4), input_name='Zip4')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Address1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address1') + value_ = 
self.gds_validate_string(value_, node, 'Address1') + self.Address1 = value_ + self.Address1_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip5') + value_ = self.gds_validate_string(value_, node, 'Zip5') + self.Zip5 = value_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip4') + value_ = self.gds_validate_string(value_, node, 'Zip4') + self.Zip4 = value_ + self.Zip4_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateRequest' + rootClass = AddressValidateRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from address_validate_request import *\n\n') + sys.stdout.write('import address_validate_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "AddressValidateRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_response.py new file mode 100644 index 0000000000..b9ed1ade2f --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/address_validate_response.py @@ -0,0 +1,1722 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:01 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/address_validate_response.py') +# +# Command line arguments: +# ./schemas/AddressValidateResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/address_validate_response.py" ./schemas/AddressValidateResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class AddressValidateResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressValidateResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressValidateResponse.subclass: + return AddressValidateResponse.subclass(*args_, **kwargs_) + else: + return AddressValidateResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def has__content(self): + if ( + self.Address is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressValidateResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressValidateResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressValidateResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressValidateResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressValidateResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressValidateResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + 
eol_ = '\n' + else: + eol_ = '' + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + self.Address.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address = obj_ + obj_.original_tagname_ = 'Address' +# end class AddressValidateResponse + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, FirmName=None, Address1=None, Address2=None, Address2Abbreviation=None, City=None, CityAbbreviation=None, State=None, Urbanization=None, Zip5=None, Zip4=None, DeliveryPoint=None, CarrierRoute=None, Footnotes=None, DPVConfirmation=None, DPVCMRA=None, DPVFootnotes=None, Business=None, CentralDeliveryPoint=None, Vacant=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Address2Abbreviation = Address2Abbreviation + self.Address2Abbreviation_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.CityAbbreviation = CityAbbreviation + self.CityAbbreviation_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + self.DeliveryPoint = DeliveryPoint + self.DeliveryPoint_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + self.Footnotes = Footnotes + self.Footnotes_nsprefix_ = None + self.DPVConfirmation = DPVConfirmation + self.DPVConfirmation_nsprefix_ = None + self.DPVCMRA = DPVCMRA + self.DPVCMRA_nsprefix_ = None + self.DPVFootnotes = DPVFootnotes + self.DPVFootnotes_nsprefix_ = None + self.Business = Business + self.Business_nsprefix_ = None + self.CentralDeliveryPoint = CentralDeliveryPoint + self.CentralDeliveryPoint_nsprefix_ = None + self.Vacant = Vacant + self.Vacant_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def 
set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Address2Abbreviation(self): + return self.Address2Abbreviation + def set_Address2Abbreviation(self, Address2Abbreviation): + self.Address2Abbreviation = Address2Abbreviation + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_CityAbbreviation(self): + return self.CityAbbreviation + def set_CityAbbreviation(self, CityAbbreviation): + self.CityAbbreviation = CityAbbreviation + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_Zip4(self): + return self.Zip4 + def set_Zip4(self, Zip4): + self.Zip4 = Zip4 + def get_DeliveryPoint(self): + return self.DeliveryPoint + def set_DeliveryPoint(self, DeliveryPoint): + self.DeliveryPoint = DeliveryPoint + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def get_Footnotes(self): + return self.Footnotes + def set_Footnotes(self, Footnotes): + self.Footnotes = Footnotes + def get_DPVConfirmation(self): + return self.DPVConfirmation + def set_DPVConfirmation(self, DPVConfirmation): + self.DPVConfirmation = DPVConfirmation + def get_DPVCMRA(self): + return self.DPVCMRA + def set_DPVCMRA(self, DPVCMRA): + self.DPVCMRA = DPVCMRA + def get_DPVFootnotes(self): + return self.DPVFootnotes + def set_DPVFootnotes(self, DPVFootnotes): + self.DPVFootnotes = DPVFootnotes + def get_Business(self): + return self.Business + def set_Business(self, Business): + self.Business = Business + def get_CentralDeliveryPoint(self): + return self.CentralDeliveryPoint + def set_CentralDeliveryPoint(self, CentralDeliveryPoint): + self.CentralDeliveryPoint = CentralDeliveryPoint + def get_Vacant(self): + return self.Vacant + def set_Vacant(self, Vacant): + self.Vacant = Vacant + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.FirmName is not None or + self.Address1 is not None or + self.Address2 is not None or + self.Address2Abbreviation is not None or + self.City is not None or + self.CityAbbreviation is not None or + self.State is not None or + self.Urbanization is not None or + self.Zip5 is not None or + self.Zip4 is not None or + self.DeliveryPoint is not None or + self.CarrierRoute is not None or + self.Footnotes is not None or + self.DPVConfirmation is not None or + self.DPVCMRA is not None or + self.DPVFootnotes is not None or + self.Business is not None or + self.CentralDeliveryPoint is not None or + self.Vacant is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ 
== 'AddressType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.Address1 is not None: + namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Address2Abbreviation is not None: + namespaceprefix_ = self.Address2Abbreviation_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2Abbreviation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2Abbreviation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2Abbreviation), input_name='Address2Abbreviation')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.CityAbbreviation is not None: + namespaceprefix_ = self.CityAbbreviation_nsprefix_ + ':' if (UseCapturedNS_ and self.CityAbbreviation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCityAbbreviation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CityAbbreviation), input_name='CityAbbreviation')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and 
self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip5), input_name='Zip5')), namespaceprefix_ , eol_)) + if self.Zip4 is not None: + namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip4), input_name='Zip4')), namespaceprefix_ , eol_)) + if self.DeliveryPoint is not None: + namespaceprefix_ = self.DeliveryPoint_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryPoint_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryPoint>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryPoint), input_name='DeliveryPoint')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + if self.Footnotes is not None: + namespaceprefix_ = self.Footnotes_nsprefix_ + ':' if (UseCapturedNS_ and self.Footnotes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFootnotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Footnotes), input_name='Footnotes')), namespaceprefix_ , eol_)) + if self.DPVConfirmation is not None: + namespaceprefix_ = self.DPVConfirmation_nsprefix_ + ':' if (UseCapturedNS_ and self.DPVConfirmation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDPVConfirmation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DPVConfirmation), input_name='DPVConfirmation')), namespaceprefix_ , eol_)) + if self.DPVCMRA is not None: + namespaceprefix_ = self.DPVCMRA_nsprefix_ + ':' if (UseCapturedNS_ and self.DPVCMRA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDPVCMRA>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DPVCMRA), input_name='DPVCMRA')), namespaceprefix_ , eol_)) + if self.DPVFootnotes is not None: + namespaceprefix_ = self.DPVFootnotes_nsprefix_ + ':' if (UseCapturedNS_ and self.DPVFootnotes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDPVFootnotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DPVFootnotes), input_name='DPVFootnotes')), namespaceprefix_ , eol_)) + if self.Business is not 
None: + namespaceprefix_ = self.Business_nsprefix_ + ':' if (UseCapturedNS_ and self.Business_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBusiness>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Business), input_name='Business')), namespaceprefix_ , eol_)) + if self.CentralDeliveryPoint is not None: + namespaceprefix_ = self.CentralDeliveryPoint_nsprefix_ + ':' if (UseCapturedNS_ and self.CentralDeliveryPoint_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCentralDeliveryPoint>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CentralDeliveryPoint), input_name='CentralDeliveryPoint')), namespaceprefix_ , eol_)) + if self.Vacant is not None: + namespaceprefix_ = self.Vacant_nsprefix_ + ':' if (UseCapturedNS_ and self.Vacant_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVacant>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Vacant), input_name='Vacant')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Address1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address1') + value_ = self.gds_validate_string(value_, node, 'Address1') + self.Address1 = value_ + self.Address1_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2Abbreviation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2Abbreviation') + value_ = self.gds_validate_string(value_, node, 'Address2Abbreviation') + self.Address2Abbreviation = value_ + self.Address2Abbreviation_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'CityAbbreviation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CityAbbreviation') + value_ = self.gds_validate_string(value_, node, 'CityAbbreviation') + self.CityAbbreviation = value_ + self.CityAbbreviation_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = 
self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip5') + value_ = self.gds_validate_string(value_, node, 'Zip5') + self.Zip5 = value_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip4') + value_ = self.gds_validate_string(value_, node, 'Zip4') + self.Zip4 = value_ + self.Zip4_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryPoint': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryPoint') + value_ = self.gds_validate_string(value_, node, 'DeliveryPoint') + self.DeliveryPoint = value_ + self.DeliveryPoint_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix + elif nodeName_ == 'Footnotes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Footnotes') + value_ = self.gds_validate_string(value_, node, 'Footnotes') + self.Footnotes = value_ + self.Footnotes_nsprefix_ = child_.prefix + elif nodeName_ == 'DPVConfirmation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DPVConfirmation') + value_ = self.gds_validate_string(value_, node, 'DPVConfirmation') + self.DPVConfirmation = value_ + self.DPVConfirmation_nsprefix_ = child_.prefix + elif nodeName_ == 'DPVCMRA': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DPVCMRA') + value_ = self.gds_validate_string(value_, node, 'DPVCMRA') + self.DPVCMRA = value_ + self.DPVCMRA_nsprefix_ = child_.prefix + elif nodeName_ == 'DPVFootnotes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DPVFootnotes') + value_ = self.gds_validate_string(value_, node, 'DPVFootnotes') + self.DPVFootnotes = value_ + self.DPVFootnotes_nsprefix_ = child_.prefix + elif nodeName_ == 'Business': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Business') + value_ = self.gds_validate_string(value_, node, 'Business') + self.Business = value_ + self.Business_nsprefix_ = child_.prefix + elif nodeName_ == 'CentralDeliveryPoint': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CentralDeliveryPoint') + value_ = self.gds_validate_string(value_, node, 'CentralDeliveryPoint') + self.CentralDeliveryPoint = value_ + self.CentralDeliveryPoint_nsprefix_ = child_.prefix + elif nodeName_ == 'Vacant': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Vacant') + value_ = self.gds_validate_string(value_, node, 'Vacant') + self.Vacant = value_ + self.Vacant_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. 
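For orientation, here is a minimal usage sketch of the generated address_validate_response module above: it feeds a hand-written AddressValidateResponse fragment to the parseString helper defined just below and prints the rebuilt object tree. The import path (karrio.schemas.usps.address_validate_response) is inferred from this patch's file layout, and the sample field values are invented; treat both as assumptions rather than as part of the generated code.

# Illustrative sketch only (not part of the generated file).
# Assumes the module is importable under karrio.schemas.usps as laid out in this patch.
from karrio.schemas.usps import address_validate_response as model

SAMPLE_XML = (
    "<AddressValidateResponse>"
    "<Address ID=\"0\">"
    "<Address2>475 LENFANT PLZ SW</Address2>"
    "<City>WASHINGTON</City>"
    "<State>DC</State>"
    "<Zip5>20260</Zip5>"
    "<Zip4>0004</Zip4>"
    "</Address>"
    "</AddressValidateResponse>"
)

# silence=True suppresses the automatic re-export to stdout; the returned
# object tree can then be inspected or re-serialized via export()/__str__.
root = model.parseString(SAMPLE_XML, silence=True)
print(root)

With silence left at its default of False, parseString also re-exports the parsed tree to stdout with an empty namespacedef_, which is a quick way to eyeball round-trip fidelity while reviewing this revert.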
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. 
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'AddressValidateResponse' + rootClass = AddressValidateResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from address_validate_response import *\n\n') + sys.stdout.write('import address_validate_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "AddressValidateResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_request.py new file mode 100644 index 0000000000..1840423791 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_request.py @@ -0,0 +1,1471 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:01 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_availability_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupAvailabilityRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_availability_request.py" ./schemas/CarrierPickupAvailabilityRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
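One detail worth noting before the support/utility functions: the Tag_pattern_ global defined above is what get_root_tag() and the generated _buildChildren() methods use to strip lxml's '{namespace-uri}local-name' tag notation down to a bare element name. A small self-contained illustration (assumptions: plain re plus an example namespace URI, nothing carrier-specific):

# Sketch of how Tag_pattern_ (defined above) reduces a namespaced lxml tag to its local name.
import re

Tag_pattern_ = re.compile(r'({.*})?(.*)')
assert Tag_pattern_.match('{http://www.w3.org/XML/1998/namespace}Zip5').groups()[-1] == 'Zip5'
assert Tag_pattern_.match('Zip5').groups()[-1] == 'Zip5'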
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write('    ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupAvailabilityRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Date=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupAvailabilityRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupAvailabilityRequest.subclass: + return CarrierPickupAvailabilityRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupAvailabilityRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Date(self): + return self.Date + def 
set_Date(self, Date): + self.Date = Date + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Date is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupAvailabilityRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupAvailabilityRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupAvailabilityRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupAvailabilityRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupAvailabilityRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + 
self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix +# end class CarrierPickupAvailabilityRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityRequest' + rootClass = CarrierPickupAvailabilityRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_availability_request import *\n\n') + sys.stdout.write('import carrier_pickup_availability_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupAvailabilityRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_response.py new file mode 100644 index 0000000000..59fb7151b7 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_availability_response.py @@ -0,0 +1,1481 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:01 2024 by generateDS.py version 2.43.3. 
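A minimal usage sketch for the generated carrier_pickup_availability_request module above (illustrative only, not part of the generated output; it assumes the karrio.schemas.usps_international package is importable and relies on the module's own parseString helper and plain element attributes):

from karrio.schemas.usps_international import (
    carrier_pickup_availability_request as request_module,
)

# Hypothetical payload, for illustration only.
xml = (
    "<CarrierPickupAvailabilityRequest>"
    "<Address2>123 MAIN ST</Address2><City>SAN FRANCISCO</City>"
    "<State>CA</State><ZIP5>94107</ZIP5><ZIP4>1234</ZIP4>"
    "</CarrierPickupAvailabilityRequest>"
)

# parseString builds the typed object tree; silence=True skips re-exporting it to stdout.
request = request_module.parseString(xml, silence=True)
print(request.ZIP5, request.City)  # -> 94107 SAN FRANCISCO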
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_availability_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupAvailabilityResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_availability_response.py" ./schemas/CarrierPickupAvailabilityResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupAvailabilityResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, DayOfWeek=None, Date=None, CarrierRoute=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupAvailabilityResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupAvailabilityResponse.subclass: + return CarrierPickupAvailabilityResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupAvailabilityResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_DayOfWeek(self): + return 
self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def has__content(self): + if ( + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.CarrierRoute is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupAvailabilityResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupAvailabilityResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupAvailabilityResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupAvailabilityResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupAvailabilityResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupAvailabilityResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ 
= self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix +# end class CarrierPickupAvailabilityResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupAvailabilityResponse' + rootClass = CarrierPickupAvailabilityResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_availability_response import *\n\n') + sys.stdout.write('import carrier_pickup_availability_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupAvailabilityResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_request.py new file mode 100644 index 0000000000..9d85b48248 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_request.py @@ -0,0 +1,1471 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:02 2024 by generateDS.py version 2.43.3. 
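A matching sketch for the carrier_pickup_availability_response module above, this time constructing the typed object directly and serializing it with the generated export method (again illustrative; the import path and sample values are assumptions, not part of the patch):

import sys

from karrio.schemas.usps_international import (
    carrier_pickup_availability_response as response_module,
)

# The generated constructor takes one keyword argument per schema element.
response = response_module.CarrierPickupAvailabilityResponse(
    City="SAN FRANCISCO",
    State="CA",
    ZIP5="94107",
    DayOfWeek="Friday",
    Date="2024-04-05",
)

# Getter/setter pairs are generated alongside the plain attributes.
assert response.get_ZIP5() == response.ZIP5

# export() writes the element's XML representation; level=0 is the root indentation.
response.export(sys.stdout, 0, name_="CarrierPickupAvailabilityResponse")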
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_cancel_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupCancelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_cancel_request.py" ./schemas/CarrierPickupCancelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
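As the comment above notes, CurrentSubclassModule_ lets an application redirect the generated factory() methods to hand back subclasses named with the "<ClassName>Sub" convention. The following is a minimal illustrative sketch, not part of the patch: it assumes the generated module is importable as carrier_pickup_cancel_request, and the USERID and ConfirmationNumber values are made up.

import sys
import carrier_pickup_cancel_request as supermod

class CarrierPickupCancelRequestSub(supermod.CarrierPickupCancelRequest):
    # Application-specific behaviour would go here; getSubclassFromModule_()
    # finds this class purely by the "<ClassName>Sub" naming rule.
    pass

# Point the generated superclass module at this module so that
# CarrierPickupCancelRequest.factory() returns the subclass instead.
supermod.CurrentSubclassModule_ = sys.modules[__name__]

request = supermod.CarrierPickupCancelRequest.factory(
    USERID="XXXXXXXXXX",                # placeholder credentials
    ConfirmationNumber="WTC000000000",  # made-up confirmation number
)
assert isinstance(request, CarrierPickupCancelRequestSub)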
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupCancelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, ConfirmationNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupCancelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupCancelRequest.subclass: + return CarrierPickupCancelRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupCancelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def 
get_ConfirmationNumber(self):
+        return self.ConfirmationNumber
+    def set_ConfirmationNumber(self, ConfirmationNumber):
+        self.ConfirmationNumber = ConfirmationNumber
+    def get_USERID(self):
+        return self.USERID
+    def set_USERID(self, USERID):
+        self.USERID = USERID
+    def get_PASSWORD(self):
+        return self.PASSWORD
+    def set_PASSWORD(self, PASSWORD):
+        self.PASSWORD = PASSWORD
+    def has__content(self):
+        if (
+            self.FirmName is not None or
+            self.SuiteOrApt is not None or
+            self.Address2 is not None or
+            self.Urbanization is not None or
+            self.City is not None or
+            self.State is not None or
+            self.ZIP5 is not None or
+            self.ZIP4 is not None or
+            self.ConfirmationNumber is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelRequest', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupCancelRequest')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'CarrierPickupCancelRequest':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupCancelRequest')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupCancelRequest', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupCancelRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.FirmName is not None:
+            namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFirmName>%s</%sFirmName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_))
+        if self.SuiteOrApt is not None:
+            namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sSuiteOrApt>%s</%sSuiteOrApt>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_))
+        if self.Address2 is not None:
+            namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAddress2>%s</%sAddress2>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_))
+        if self.Urbanization is not None:
+            namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sUrbanization>%s</%sUrbanization>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_))
+        if self.City is not None:
+            namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCity>%s</%sCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_))
+        if self.State is not None:
+            namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sState>%s</%sState>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_))
+        if self.ZIP5 is not None:
+            namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sZIP5>%s</%sZIP5>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_))
+        if self.ZIP4 is not None:
+            namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sZIP4>%s</%sZIP4>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_))
+        if self.ConfirmationNumber is not None:
+            namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sConfirmationNumber>%s</%sConfirmationNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        value = find_attr_value_('USERID', node)
+        if value is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            self.USERID = value
+        value = find_attr_value_('PASSWORD', node)
+        if value is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            self.PASSWORD = value
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'FirmName':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'FirmName')
+            value_ = self.gds_validate_string(value_, node, 'FirmName')
+            self.FirmName = value_
+            self.FirmName_nsprefix_ = child_.prefix
+        elif nodeName_ == 'SuiteOrApt':
+            value_ =
child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix +# end class CarrierPickupCancelRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelRequest' + rootClass = CarrierPickupCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_cancel_request import *\n\n') + sys.stdout.write('import carrier_pickup_cancel_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupCancelRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_response.py new file mode 100644 index 0000000000..5ed45ceac5 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_cancel_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:02 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_cancel_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupCancelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_cancel_response.py" ./schemas/CarrierPickupCancelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
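            # (Readability note, not generated output.) get_path_ builds an
            # element location string: get_path_list_ walks from a node up
            # through its ancestors collecting namespace-stripped tag names,
            # which get_path_ then reverses and joins with '/'.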
+            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupCancelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, CarrierRoute=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupCancelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupCancelResponse.subclass: + return CarrierPickupCancelResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupCancelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + def 
set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.CarrierRoute is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupCancelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupCancelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupCancelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupCancelResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupCancelResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupCancelResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + 
self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix +# end class CarrierPickupCancelResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + 
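            # (Readability note, not generated output.) Tag_pattern_ drops any
            # leading '{namespace-uri}' from child.tag, so _buildChildren below
            # dispatches on the local element name, e.g.
            # '{http://example.com/ns}Count' -> 'Count'.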
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupCancelResponse' + rootClass = CarrierPickupCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_cancel_response import *\n\n') + sys.stdout.write('import carrier_pickup_cancel_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
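For orientation, a minimal usage sketch of the entry points generated above (illustrative only; the import path assumes the karrio.schemas.usps_international package layout this diff introduces):

    from karrio.schemas.usps_international import carrier_pickup_cancel_response as model

    xml = (
        '<CarrierPickupCancelResponse>'
        '<FirstName>Jane</FirstName>'
        '<ConfirmationNumber>WTC123456789</ConfirmationNumber>'
        '</CarrierPickupCancelResponse>'
    )
    # parseString builds the object tree; silence=True skips re-exporting to stdout.
    obj = model.parseString(xml, silence=True)
    assert obj.get_FirstName() == 'Jane'
    assert obj.get_ConfirmationNumber() == 'WTC123456789'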
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupCancelResponse", + "PackageType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_request.py new file mode 100644 index 0000000000..de5d814e5e --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_request.py @@ -0,0 +1,1744 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:02 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_change_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupChangeRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_change_request.py" ./schemas/CarrierPickupChangeRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
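As a concrete illustration of the override hooks documented above (hypothetical, not part of this patch): dropping a module named generatedscollector.py on the import path replaces the fallback GdsCollector_ defined earlier, so warnings can, for example, be raised instead of accumulated. Only the small interface used by the generated code needs to exist:

    # File: generatedscollector.py (hypothetical override)
    class GdsCollector(object):
        def __init__(self, messages=None):
            self.messages = [] if messages is None else messages

        def add_message(self, msg):
            # Fail fast instead of quietly collecting validation warnings.
            raise ValueError('schema warning: {}'.format(msg))

        def get_messages(self):
            return self.messages

        def clear_messages(self):
            self.messages = []

        def print_messages(self):
            for msg in self.messages:
                print('Warning: {}'.format(msg))

        def write_messages(self, outstream):
            for msg in self.messages:
                outstream.write('Warning: {}\n'.format(msg))

The try/except import immediately below plays the same role for generatedssuper, letting a project substitute its own GeneratedsSuper implementation.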
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
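            # (Editorial aside, not part of the generated code.) A tiny worked
            # example of the rule described above, with made-up inputs:
            #
            #     patterns = [['[0-9]+', '[A-Z]+'], ['.{1,3}']]
            #     target = '123'   -> True  (a full-length match in every group)
            #     target = '1234'  -> False (no pattern in the second group
            #                                matches the whole value)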
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupChangeRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, EmailAddress=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.EmailAddress = EmailAddress + self.EmailAddress_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupChangeRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupChangeRequest.subclass: + return CarrierPickupChangeRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupChangeRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return 
self.FirstName + def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_EmailAddress(self): + return self.EmailAddress + def set_EmailAddress(self, EmailAddress): + self.EmailAddress = EmailAddress + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.EmailAddress is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupChangeRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = 
'' + if self.original_tagname_ is not None and name_ == 'CarrierPickupChangeRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupChangeRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupChangeRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupChangeRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and 
self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.EmailAddress is not None: + namespaceprefix_ = self.EmailAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.EmailAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailAddress), input_name='EmailAddress')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = 
self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailAddress') + value_ = self.gds_validate_string(value_, node, 'EmailAddress') + self.EmailAddress = value_ + self.EmailAddress_nsprefix_ = child_.prefix +# end class CarrierPickupChangeRequest + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, 
PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = 
self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeRequest' + rootClass = CarrierPickupChangeRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_change_request import *\n\n') + sys.stdout.write('import carrier_pickup_change_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
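Editorial aside, a minimal usage sketch that is not part of the patch: assuming lxml and six are installed and the module above is importable by its plain file name (as parseLiteral's own output suggests), the generated API could be exercised roughly like this; the sample field values are made up.

import sys
import carrier_pickup_change_request as model_

SAMPLE_XML = (
    '<CarrierPickupChangeRequest USERID="XXX" PASSWORD="XXX">'
    '<FirstName>Jane</FirstName>'
    '<LastName>Doe</LastName>'
    '<ConfirmationNumber>WTC123456789</ConfirmationNumber>'
    '</CarrierPickupChangeRequest>'
)

# Parse without re-exporting to stdout, then read values back.
request = model_.parseString(SAMPLE_XML, silence=True)
print(request.get_FirstName(), request.get_ConfirmationNumber())

# Build on the object model and serialize it again.
request.add_Package(model_.PackageType(ServiceType='PriorityMailExpress', Count=1))
request.export(sys.stdout, 0, name_='CarrierPickupChangeRequest')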
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupChangeRequest", + "PackageType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_response.py new file mode 100644 index 0000000000..b80f2d5977 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_change_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:02 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_change_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupChangeResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_change_response.py" ./schemas/CarrierPickupChangeResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
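Editorial aside, a hypothetical sketch that is not part of the patch: the hook described above would pick up a project-local generatedssuper.py such as the fragment below. The method bodies are illustrative only, and a real replacement has to provide every gds_* helper the generated classes call, matching the default implementations that follow.

# File: generatedssuper.py  (hypothetical override module)
class GeneratedsSuper(object):
    __hash__ = object.__hash__

    def gds_encode(self, instring):
        return instring

    def gds_format_string(self, input_data, input_name=''):
        # Example tweak: collapse runs of whitespace on export.
        return ' '.join(str(input_data).split())

    def gds_parse_string(self, input_data, node=None, input_name=''):
        return input_data

    def gds_validate_string(self, input_data, node=None, input_name=''):
        return input_data or ''

    def gds_format_integer(self, input_data, input_name=''):
        return '%d' % int(input_data)

    # ... plus the remaining gds_* helpers used by the generated classes.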
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupChangeResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, Status=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupChangeResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupChangeResponse.subclass: + return CarrierPickupChangeResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupChangeResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + def set_FirstName(self, 
FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.Status is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupChangeResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if 
self.original_tagname_ is not None and name_ == 'CarrierPickupChangeResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupChangeResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupChangeResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupChangeResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupChangeResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.Status is not None: + namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatus>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = 
child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'Status': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Status') + value_ = self.gds_validate_string(value_, node, 'Status') + self.Status = value_ + self.Status_nsprefix_ = child_.prefix +# end class CarrierPickupChangeResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = 
kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def 
_buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupChangeResponse' + rootClass = CarrierPickupChangeResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_change_response import *\n\n') + sys.stdout.write('import carrier_pickup_change_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupChangeResponse", + "PackageType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_request.py new file mode 100644 index 0000000000..7b71217c20 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_request.py @@ -0,0 +1,1471 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:02 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_inquiry_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupInquiryRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_inquiry_request.py" ./schemas/CarrierPickupInquiryRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
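+            # Illustrative example (added comment, not generated): a ZIP field
+            # might pass patterns=[['^[0-9]{5}$', '^[0-9]{5}-[0-9]{4}$']] with
+            # target='20770-1234'; the loop below accepts the target as soon as
+            # one pattern in each group matches it in full.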
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
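+    # Illustrative comment: with the original generateDS escaping helpers,
+    # quote_xml('AT&T <Pickup>') returns 'AT&amp;T &lt;Pickup&gt;', while any
+    # <![CDATA[ ... ]]> section matched by CDATA_pattern_ is copied through
+    # unescaped.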
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupInquiryRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, ConfirmationNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupInquiryRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupInquiryRequest.subclass: + return CarrierPickupInquiryRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupInquiryRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def 
get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.ConfirmationNumber is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupInquiryRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupInquiryRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupInquiryRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupInquiryRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupInquiryRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) 
else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix +# end class CarrierPickupInquiryRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryRequest' + rootClass = CarrierPickupInquiryRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_inquiry_request import *\n\n') + sys.stdout.write('import carrier_pickup_inquiry_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupInquiryRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_response.py new file mode 100644 index 0000000000..c97effb350 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_inquiry_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:02 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_inquiry_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupInquiryResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_inquiry_response.py" ./schemas/CarrierPickupInquiryResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupInquiryResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, EmailAddress=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.EmailAddress = EmailAddress + self.EmailAddress_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupInquiryResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupInquiryResponse.subclass: + return CarrierPickupInquiryResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupInquiryResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + 
def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_EmailAddress(self): + return self.EmailAddress + def set_EmailAddress(self, EmailAddress): + self.EmailAddress = EmailAddress + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.EmailAddress is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupInquiryResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupInquiryResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupInquiryResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupInquiryResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupInquiryResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupInquiryResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.EmailAddress is not None: + namespaceprefix_ = self.EmailAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.EmailAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailAddress), input_name='EmailAddress')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + 
self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailAddress') + value_ = self.gds_validate_string(value_, node, 'EmailAddress') + self.EmailAddress = value_ + self.EmailAddress_nsprefix_ = child_.prefix +# end class CarrierPickupInquiryResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = 
gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space 
used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupInquiryResponse' + rootClass = CarrierPickupInquiryResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_inquiry_response import *\n\n') + sys.stdout.write('import carrier_pickup_inquiry_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
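As an aside, the entry points above (parse, parseEtree, parseString, parseLiteral) make the generated module usable on its own. The following is a minimal sketch, assuming the generated file is importable as carrier_pickup_inquiry_response (adjust to the actual package layout) and that lxml is installed; the XML values are invented for illustration.

    from carrier_pickup_inquiry_response import parseString  # hypothetical flat import

    # A fragment shaped like the CarrierPickupInquiryResponse element defined above.
    xml = (
        "<CarrierPickupInquiryResponse>"
        "<FirstName>Jane</FirstName>"
        "<LastName>Doe</LastName>"
        "<ConfirmationNumber>ABC123456789</ConfirmationNumber>"
        "</CarrierPickupInquiryResponse>"
    )

    # silence=True suppresses the re-export to stdout; the return value is the
    # populated root object built by CarrierPickupInquiryResponse.build().
    response = parseString(xml, silence=True)
    print(response.get_FirstName(), response.get_ConfirmationNumber())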
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupInquiryResponse", + "PackageType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_request.py new file mode 100644 index 0000000000..b57f926c7b --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_request.py @@ -0,0 +1,1727 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:03 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_schedule_request.py') +# +# Command line arguments: +# ./schemas/CarrierPickupScheduleRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_schedule_request.py" ./schemas/CarrierPickupScheduleRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
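The override hooks described in the comments above all follow the same pattern: if a module with the expected name is importable, it replaces the built-in fallback. Below is a minimal sketch of a custom collector, assuming only what the default GdsCollector_ above requires (a module named generatedscollector exposing a GdsCollector class with the same five methods); the logger name is an arbitrary example. The same mechanism applies to generatedsnamespaces and generatedssuper.

    # File: generatedscollector.py (hypothetical drop-in)
    import logging

    logger = logging.getLogger("usps.schemas")  # example logger name


    class GdsCollector(object):
        """Same interface as the default GdsCollector_, but every validation
        warning is also forwarded to the logging module."""

        def __init__(self, messages=None):
            if messages is None:
                self.messages = []
            else:
                self.messages = messages

        def add_message(self, msg):
            self.messages.append(msg)
            logger.warning(msg)

        def get_messages(self):
            return self.messages

        def clear_messages(self):
            self.messages = []

        def print_messages(self):
            for msg in self.messages:
                print("Warning: {}".format(msg))

        def write_messages(self, outstream):
            for msg in self.messages:
                outstream.write("Warning: {}\n".format(msg))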
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
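+            # Illustrative example (patterns are hypothetical, not taken from the
+            # USPS schemas): with patterns = [['[0-9]{5}', '[0-9]{5}-[0-9]{4}']],
+            # a target of '20770' or '20770-1234' validates because the whole
+            # string matches one alternative in the group, while '20770x' fails,
+            # since only full-length matches are accepted below.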
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
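+            # The node is simply re-serialized: for a hypothetical element
+            # <Note>call first</Note> this method returns the literal string
+            # '<Note>call first</Note>' rather than a typed object.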
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections."
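+    # For example (illustrative input), quote_xml('AT&T <Pickup>') returns
+    # 'AT&amp;T &lt;Pickup&gt;', while spans matched by CDATA_pattern_
+    # (CDATA sections) are copied through without escaping.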
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
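+        # Mixed content interleaves text and child elements, e.g. a hypothetical
+        # <Note>call <b>before</b> noon</Note>: the text pieces are CategoryText,
+        # the <b> child is CategoryComplex, and each piece is wrapped in its own
+        # MixedContainer and exported by one of the branches below.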
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupScheduleRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, EmailAddress=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.EmailAddress = EmailAddress + self.EmailAddress_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupScheduleRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupScheduleRequest.subclass: + return CarrierPickupScheduleRequest.subclass(*args_, **kwargs_) + else: + return CarrierPickupScheduleRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName + def set_FirstName(self, FirstName): + self.FirstName = FirstName + def 
get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_EmailAddress(self): + return self.EmailAddress + def set_EmailAddress(self, EmailAddress): + self.EmailAddress = EmailAddress + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.EmailAddress is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupScheduleRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupScheduleRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, 
name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupScheduleRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupScheduleRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupScheduleRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + 
namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.EmailAddress is not None: + namespaceprefix_ = self.EmailAddress_nsprefix_ + ':' 
if (UseCapturedNS_ and self.EmailAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailAddress), input_name='EmailAddress')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 
= value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailAddress') + value_ = self.gds_validate_string(value_, node, 'EmailAddress') + self.EmailAddress = value_ + self.EmailAddress_nsprefix_ = child_.prefix +# end class CarrierPickupScheduleRequest + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if 
self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleRequest' + rootClass = CarrierPickupScheduleRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_schedule_request import *\n\n') + sys.stdout.write('import carrier_pickup_schedule_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupScheduleRequest", + "PackageType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_response.py new file mode 100644 index 0000000000..fd66b2f41d --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/carrier_pickup_schedule_response.py @@ -0,0 +1,1754 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:03 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/carrier_pickup_schedule_response.py') +# +# Command line arguments: +# ./schemas/CarrierPickupScheduleResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/carrier_pickup_schedule_response.py" ./schemas/CarrierPickupScheduleResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
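+# For illustration only (the argument strings are hypothetical): quote_xml
+# and quote_attrib below escape XML markup characters in element text and
+# attribute values before they are written out, e.g.:
+#
+#   quote_xml('Fish & Chips')       # escape markup in element text
+#   quote_attrib('5" x 7" label')   # quote and escape an attribute value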
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CarrierPickupScheduleResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FirstName=None, LastName=None, FirmName=None, SuiteOrApt=None, Address2=None, Urbanization=None, City=None, State=None, ZIP5=None, ZIP4=None, Phone=None, Extension=None, Package=None, EstimatedWeight=None, PackageLocation=None, SpecialInstructions=None, ConfirmationNumber=None, DayOfWeek=None, Date=None, CarrierRoute=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.SuiteOrApt = SuiteOrApt + self.SuiteOrApt_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP5 = ZIP5 + self.ZIP5_nsprefix_ = None + self.ZIP4 = ZIP4 + self.ZIP4_nsprefix_ = None + self.Phone = Phone + self.Phone_nsprefix_ = None + self.Extension = Extension + self.Extension_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.EstimatedWeight = EstimatedWeight + self.EstimatedWeight_nsprefix_ = None + self.PackageLocation = PackageLocation + self.PackageLocation_nsprefix_ = None + self.SpecialInstructions = SpecialInstructions + self.SpecialInstructions_nsprefix_ = None + self.ConfirmationNumber = ConfirmationNumber + self.ConfirmationNumber_nsprefix_ = None + self.DayOfWeek = DayOfWeek + self.DayOfWeek_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CarrierPickupScheduleResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CarrierPickupScheduleResponse.subclass: + return CarrierPickupScheduleResponse.subclass(*args_, **kwargs_) + else: + return CarrierPickupScheduleResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FirstName(self): + return self.FirstName 
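+    # The get_*/set_* pairs that follow repeat the same generated accessor
+    # pattern for each element; a hypothetical usage sketch (all values
+    # are made up):
+    #
+    #   resp = CarrierPickupScheduleResponse(FirstName='JOHN')
+    #   resp.set_LastName('DOE')
+    #   resp.get_ConfirmationNumber()   # None until set or parsed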
+ def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_SuiteOrApt(self): + return self.SuiteOrApt + def set_SuiteOrApt(self, SuiteOrApt): + self.SuiteOrApt = SuiteOrApt + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZIP5(self): + return self.ZIP5 + def set_ZIP5(self, ZIP5): + self.ZIP5 = ZIP5 + def get_ZIP4(self): + return self.ZIP4 + def set_ZIP4(self, ZIP4): + self.ZIP4 = ZIP4 + def get_Phone(self): + return self.Phone + def set_Phone(self, Phone): + self.Phone = Phone + def get_Extension(self): + return self.Extension + def set_Extension(self, Extension): + self.Extension = Extension + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_EstimatedWeight(self): + return self.EstimatedWeight + def set_EstimatedWeight(self, EstimatedWeight): + self.EstimatedWeight = EstimatedWeight + def get_PackageLocation(self): + return self.PackageLocation + def set_PackageLocation(self, PackageLocation): + self.PackageLocation = PackageLocation + def get_SpecialInstructions(self): + return self.SpecialInstructions + def set_SpecialInstructions(self, SpecialInstructions): + self.SpecialInstructions = SpecialInstructions + def get_ConfirmationNumber(self): + return self.ConfirmationNumber + def set_ConfirmationNumber(self, ConfirmationNumber): + self.ConfirmationNumber = ConfirmationNumber + def get_DayOfWeek(self): + return self.DayOfWeek + def set_DayOfWeek(self, DayOfWeek): + self.DayOfWeek = DayOfWeek + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def has__content(self): + if ( + self.FirstName is not None or + self.LastName is not None or + self.FirmName is not None or + self.SuiteOrApt is not None or + self.Address2 is not None or + self.Urbanization is not None or + self.City is not None or + self.State is not None or + self.ZIP5 is not None or + self.ZIP4 is not None or + self.Phone is not None or + self.Extension is not None or + self.Package or + self.EstimatedWeight is not None or + self.PackageLocation is not None or + self.SpecialInstructions is not None or + self.ConfirmationNumber is not None or + self.DayOfWeek is not None or + self.Date is not None or + self.CarrierRoute is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierPickupScheduleResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CarrierPickupScheduleResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierPickupScheduleResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierPickupScheduleResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierPickupScheduleResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierPickupScheduleResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.SuiteOrApt is not None: + namespaceprefix_ = self.SuiteOrApt_nsprefix_ + ':' if (UseCapturedNS_ and self.SuiteOrApt_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSuiteOrApt>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SuiteOrApt), input_name='SuiteOrApt')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP5 is not None: + namespaceprefix_ = self.ZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP5), input_name='ZIP5')), namespaceprefix_ , eol_)) + if self.ZIP4 is not None: + namespaceprefix_ = self.ZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZIP4), input_name='ZIP4')), namespaceprefix_ , eol_)) + if self.Phone is not None: + namespaceprefix_ = self.Phone_nsprefix_ + ':' if (UseCapturedNS_ and self.Phone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Phone), input_name='Phone')), namespaceprefix_ , eol_)) + if self.Extension is not None: + namespaceprefix_ = self.Extension_nsprefix_ + ':' if (UseCapturedNS_ and self.Extension_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtension>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Extension), input_name='Extension')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + if self.EstimatedWeight is not None: + namespaceprefix_ = self.EstimatedWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.EstimatedWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEstimatedWeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EstimatedWeight), input_name='EstimatedWeight')), namespaceprefix_ , eol_)) + if self.PackageLocation is not None: + namespaceprefix_ = self.PackageLocation_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageLocation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackageLocation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PackageLocation), input_name='PackageLocation')), namespaceprefix_ , eol_)) + if self.SpecialInstructions is not None: + namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialInstructions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_)) + if self.ConfirmationNumber is not None: + namespaceprefix_ = self.ConfirmationNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ConfirmationNumber_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sConfirmationNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConfirmationNumber), input_name='ConfirmationNumber')), namespaceprefix_ , eol_)) + if self.DayOfWeek is not None: + namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDayOfWeek>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'SuiteOrApt': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SuiteOrApt') + value_ = self.gds_validate_string(value_, node, 'SuiteOrApt') + self.SuiteOrApt = value_ + self.SuiteOrApt_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + 
self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP5') + value_ = self.gds_validate_string(value_, node, 'ZIP5') + self.ZIP5 = value_ + self.ZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZIP4') + value_ = self.gds_validate_string(value_, node, 'ZIP4') + self.ZIP4 = value_ + self.ZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Phone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Phone') + value_ = self.gds_validate_string(value_, node, 'Phone') + self.Phone = value_ + self.Phone_nsprefix_ = child_.prefix + elif nodeName_ == 'Extension': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Extension') + value_ = self.gds_validate_string(value_, node, 'Extension') + self.Extension = value_ + self.Extension_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' + elif nodeName_ == 'EstimatedWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EstimatedWeight') + value_ = self.gds_validate_string(value_, node, 'EstimatedWeight') + self.EstimatedWeight = value_ + self.EstimatedWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'PackageLocation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PackageLocation') + value_ = self.gds_validate_string(value_, node, 'PackageLocation') + self.PackageLocation = value_ + self.PackageLocation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialInstructions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialInstructions') + value_ = self.gds_validate_string(value_, node, 'SpecialInstructions') + self.SpecialInstructions = value_ + self.SpecialInstructions_nsprefix_ = child_.prefix + elif nodeName_ == 'ConfirmationNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ConfirmationNumber') + value_ = self.gds_validate_string(value_, node, 'ConfirmationNumber') + self.ConfirmationNumber = value_ + self.ConfirmationNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'DayOfWeek': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DayOfWeek') + value_ = self.gds_validate_string(value_, node, 'DayOfWeek') + self.DayOfWeek = value_ + self.DayOfWeek_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix +# end class CarrierPickupScheduleResponse + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceType=None, Count=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = 
gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Count = Count + self.Count_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Count(self): + return self.Count + def set_Count(self, Count): + self.Count = Count + def has__content(self): + if ( + self.ServiceType is not None or + self.Count is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Count is not None: + namespaceprefix_ = self.Count_nsprefix_ + ':' if (UseCapturedNS_ and self.Count_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Count, input_name='Count'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Count' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Count') + ival_ = self.gds_validate_integer(ival_, node, 'Count') + self.Count = ival_ + self.Count_nsprefix_ = child_.prefix +# end class PackageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space 
used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CarrierPickupScheduleResponse' + rootClass = CarrierPickupScheduleResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from carrier_pickup_schedule_response import *\n\n') + sys.stdout.write('import carrier_pickup_schedule_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "CarrierPickupScheduleResponse", + "PackageType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_request.py new file mode 100644 index 0000000000..bc486dc54f --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_request.py @@ -0,0 +1,1440 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:03 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/city_state_lookup_request.py') +# +# Command line arguments: +# ./schemas/CityStateLookupRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/city_state_lookup_request.py" ./schemas/CityStateLookupRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
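+#
+# A hypothetical sketch of such an override (the method body shown is
+# illustrative only; a replacement class is expected to supply every
+# gds_* helper the generated classes call, typically by copying the
+# default class below and editing it):
+#
+## # File: generatedssuper.py
+## class GeneratedsSuper(object):
+##     # ... keep the other default gds_* helpers ...
+##     def gds_format_string(self, input_data, input_name=''):
+##         # e.g. collapse runs of whitespace before export
+##         return ' '.join(str(input_data).split())
+#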
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
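+        # Illustrative note (an assumed example, not generated output): with
+        # patterns=[['[0-9]{5}']] and target='90210' the single group is fully
+        # matched, so the check passes; adding a second group such as
+        # ['[A-Z]+'] would make it fail, because every group needs a match.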
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CityStateLookupRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, ZipCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.ZipCode = ZipCode + self.ZipCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CityStateLookupRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CityStateLookupRequest.subclass: + return CityStateLookupRequest.subclass(*args_, **kwargs_) + else: + return CityStateLookupRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ZipCode(self): + return self.ZipCode + def set_ZipCode(self, ZipCode): + self.ZipCode = ZipCode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.ZipCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CityStateLookupRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CityStateLookupRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CityStateLookupRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CityStateLookupRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CityStateLookupRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ZipCode is not None: + namespaceprefix_ = self.ZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipCode_nsprefix_) else '' + self.ZipCode.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ZipCode', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ZipCode': + obj_ = ZipCodeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ZipCode = obj_ + obj_.original_tagname_ = 'ZipCode' +# end class CityStateLookupRequest + + +class ZipCodeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Zip5=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeType.subclass: + return ZipCodeType.subclass(*args_, **kwargs_) + else: + return ZipCodeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Zip5 is not None + ): + return True + else: + return False + def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='ZipCodeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ZipCodeType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Zip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zip5') + ival_ = self.gds_validate_integer(ival_, node, 'Zip5') + self.Zip5 = ival_ + self.Zip5_nsprefix_ = child_.prefix +# end class ZipCodeType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupRequest' + rootClass = CityStateLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from city_state_lookup_request import *\n\n') + sys.stdout.write('import city_state_lookup_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CityStateLookupRequest", + "ZipCodeType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_response.py new file mode 100644 index 0000000000..5feb721151 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/city_state_lookup_response.py @@ -0,0 +1,1459 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:03 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/city_state_lookup_response.py') +# +# Command line arguments: +# ./schemas/CityStateLookupResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/city_state_lookup_response.py" ./schemas/CityStateLookupResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class CityStateLookupResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ZipCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ZipCode is None: + self.ZipCode = [] + else: + self.ZipCode = ZipCode + self.ZipCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CityStateLookupResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CityStateLookupResponse.subclass: + return CityStateLookupResponse.subclass(*args_, **kwargs_) + else: + return CityStateLookupResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ZipCode(self): + return self.ZipCode + def set_ZipCode(self, ZipCode): + self.ZipCode = ZipCode + def add_ZipCode(self, value): + self.ZipCode.append(value) + def insert_ZipCode_at(self, index, value): + self.ZipCode.insert(index, value) + def replace_ZipCode_at(self, index, value): + self.ZipCode[index] = value + def has__content(self): + if ( + self.ZipCode + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CityStateLookupResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CityStateLookupResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CityStateLookupResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CityStateLookupResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='CityStateLookupResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CityStateLookupResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ZipCode_ in self.ZipCode: + namespaceprefix_ = self.ZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipCode_nsprefix_) else '' + ZipCode_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ZipCode', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ZipCode': + obj_ = ZipCodeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ZipCode.append(obj_) + obj_.original_tagname_ = 'ZipCode' +# end class CityStateLookupResponse + + +class ZipCodeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Zip5=None, City=None, State=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeType.subclass: + return ZipCodeType.subclass(*args_, **kwargs_) + else: + return ZipCodeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Zip5 is not None or + self.City is not None or + self.State is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, 
name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ZipCodeType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Zip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zip5') + ival_ = self.gds_validate_integer(ival_, node, 'Zip5') + self.Zip5 = ival_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix +# end class ZipCodeType + + +# +# End data representation classes. 
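A minimal usage sketch, not part of the reverted patch itself: assuming the generated module above is importable as city_state_lookup_response (the name its own parseLiteral output uses; the real install path may differ) and given a purely illustrative response payload, the generated parseString() entry point and accessor methods fit together roughly as follows.

import city_state_lookup_response as model_  # assumed import name; adjust to the installed package path

# Hypothetical CityStateLookup response body (placeholder values, no XML declaration,
# per the parseString docstring in the generated module).
SAMPLE_XML = (
    '<CityStateLookupResponse>'
    '<ZipCode ID="0">'
    '<Zip5>20024</Zip5><City>WASHINGTON</City><State>DC</State>'
    '</ZipCode>'
    '</CityStateLookupResponse>'
)

# parseString() builds the object tree; silence=True skips re-exporting it to stdout.
response = model_.parseString(SAMPLE_XML, silence=True)
for zip_code in response.get_ZipCode():
    # ZipCodeType exposes the parsed child elements through generated getters.
    print(zip_code.get_Zip5(), zip_code.get_City(), zip_code.get_State())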
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. 
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'CityStateLookupResponse' + rootClass = CityStateLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from city_state_lookup_response import *\n\n') + sys.stdout.write('import city_state_lookup_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CityStateLookupResponse", + "ZipCodeType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/emrsv4_0_bulk_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/emrsv4_0_bulk_request.py new file mode 100644 index 0000000000..81c8d7f2d4 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/emrsv4_0_bulk_request.py @@ -0,0 +1,1846 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:03 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/emrsv4_0_bulk_request.py') +# +# Command line arguments: +# ./schemas/EMRSV4.0BulkRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/emrsv4_0_bulk_request.py" ./schemas/EMRSV4.0BulkRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class EMRSV4_0BulkRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, LabelCount=None, ImageParameters=None, RetailerName=None, RetailerAddress=None, PermitNumber=None, PermitIssuingPOCity=None, PermitIssuingPOState=None, PermitIssuingPOZip5=None, PDUFirmName=None, PDUPOBox=None, PDUCity=None, PDUState=None, PDUZip5=None, PDUZip4=None, ServiceType=None, DeliveryConfirmation=None, InsuranceValue=None, MailingAckPackageID=None, WeightInPounds=None, WeightInOunces=None, RMA=None, RMAPICFlag=None, ImageType=None, RMABarcode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.LabelCount = LabelCount + self.LabelCount_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.RetailerName = RetailerName + self.RetailerName_nsprefix_ = None + self.RetailerAddress = RetailerAddress + self.RetailerAddress_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.PermitIssuingPOCity = PermitIssuingPOCity + self.PermitIssuingPOCity_nsprefix_ = None + self.PermitIssuingPOState = PermitIssuingPOState + self.PermitIssuingPOState_nsprefix_ = None + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + self.PermitIssuingPOZip5_nsprefix_ = None + self.PDUFirmName = PDUFirmName + self.PDUFirmName_nsprefix_ = None + self.PDUPOBox = PDUPOBox + self.PDUPOBox_nsprefix_ = None + self.PDUCity = PDUCity + self.PDUCity_nsprefix_ = None + self.PDUState = PDUState + self.PDUState_nsprefix_ = None + self.PDUZip5 = PDUZip5 + self.PDUZip5_nsprefix_ = None + self.PDUZip4 = PDUZip4 + self.PDUZip4_nsprefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.DeliveryConfirmation = DeliveryConfirmation + self.DeliveryConfirmation_nsprefix_ = None + self.InsuranceValue = InsuranceValue + self.InsuranceValue_nsprefix_ = None + self.MailingAckPackageID = MailingAckPackageID + self.MailingAckPackageID_nsprefix_ = None + self.WeightInPounds = WeightInPounds + self.WeightInPounds_nsprefix_ = None + self.WeightInOunces = WeightInOunces + self.WeightInOunces_nsprefix_ = None + self.RMA = RMA + self.RMA_nsprefix_ = None + self.RMAPICFlag = 
RMAPICFlag + self.RMAPICFlag_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.RMABarcode = RMABarcode + self.RMABarcode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, EMRSV4_0BulkRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EMRSV4_0BulkRequest.subclass: + return EMRSV4_0BulkRequest.subclass(*args_, **kwargs_) + else: + return EMRSV4_0BulkRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_LabelCount(self): + return self.LabelCount + def set_LabelCount(self, LabelCount): + self.LabelCount = LabelCount + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_RetailerName(self): + return self.RetailerName + def set_RetailerName(self, RetailerName): + self.RetailerName = RetailerName + def get_RetailerAddress(self): + return self.RetailerAddress + def set_RetailerAddress(self, RetailerAddress): + self.RetailerAddress = RetailerAddress + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_PermitIssuingPOCity(self): + return self.PermitIssuingPOCity + def set_PermitIssuingPOCity(self, PermitIssuingPOCity): + self.PermitIssuingPOCity = PermitIssuingPOCity + def get_PermitIssuingPOState(self): + return self.PermitIssuingPOState + def set_PermitIssuingPOState(self, PermitIssuingPOState): + self.PermitIssuingPOState = PermitIssuingPOState + def get_PermitIssuingPOZip5(self): + return self.PermitIssuingPOZip5 + def set_PermitIssuingPOZip5(self, PermitIssuingPOZip5): + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + def get_PDUFirmName(self): + return self.PDUFirmName + def set_PDUFirmName(self, PDUFirmName): + self.PDUFirmName = PDUFirmName + def get_PDUPOBox(self): + return self.PDUPOBox + def set_PDUPOBox(self, PDUPOBox): + self.PDUPOBox = PDUPOBox + def get_PDUCity(self): + return self.PDUCity + def set_PDUCity(self, PDUCity): + self.PDUCity = PDUCity + def get_PDUState(self): + return self.PDUState + def set_PDUState(self, PDUState): + self.PDUState = PDUState + def get_PDUZip5(self): + return self.PDUZip5 + def set_PDUZip5(self, PDUZip5): + self.PDUZip5 = PDUZip5 + def get_PDUZip4(self): + return self.PDUZip4 + def set_PDUZip4(self, PDUZip4): + self.PDUZip4 = PDUZip4 + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_DeliveryConfirmation(self): + return self.DeliveryConfirmation + def set_DeliveryConfirmation(self, DeliveryConfirmation): + self.DeliveryConfirmation = DeliveryConfirmation + def get_InsuranceValue(self): + return self.InsuranceValue + def set_InsuranceValue(self, InsuranceValue): + self.InsuranceValue = InsuranceValue + def get_MailingAckPackageID(self): + return self.MailingAckPackageID + def set_MailingAckPackageID(self, MailingAckPackageID): + self.MailingAckPackageID = MailingAckPackageID + def get_WeightInPounds(self): + return self.WeightInPounds + def set_WeightInPounds(self, WeightInPounds): + self.WeightInPounds = WeightInPounds + def get_WeightInOunces(self): + return 
self.WeightInOunces + def set_WeightInOunces(self, WeightInOunces): + self.WeightInOunces = WeightInOunces + def get_RMA(self): + return self.RMA + def set_RMA(self, RMA): + self.RMA = RMA + def get_RMAPICFlag(self): + return self.RMAPICFlag + def set_RMAPICFlag(self, RMAPICFlag): + self.RMAPICFlag = RMAPICFlag + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_RMABarcode(self): + return self.RMABarcode + def set_RMABarcode(self, RMABarcode): + self.RMABarcode = RMABarcode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.LabelCount is not None or + self.ImageParameters is not None or + self.RetailerName is not None or + self.RetailerAddress is not None or + self.PermitNumber is not None or + self.PermitIssuingPOCity is not None or + self.PermitIssuingPOState is not None or + self.PermitIssuingPOZip5 is not None or + self.PDUFirmName is not None or + self.PDUPOBox is not None or + self.PDUCity is not None or + self.PDUState is not None or + self.PDUZip5 is not None or + self.PDUZip4 is not None or + self.ServiceType is not None or + self.DeliveryConfirmation is not None or + self.InsuranceValue is not None or + self.MailingAckPackageID is not None or + self.WeightInPounds is not None or + self.WeightInOunces is not None or + self.RMA is not None or + self.RMAPICFlag is not None or + self.ImageType is not None or + self.RMABarcode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EMRSV4.0BulkRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('EMRSV4.0BulkRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'EMRSV4.0BulkRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EMRSV4.0BulkRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EMRSV4.0BulkRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EMRSV4.0BulkRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EMRSV4.0BulkRequest', fromsubclass_=False, 
pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.LabelCount is not None: + namespaceprefix_ = self.LabelCount_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelCount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelCount>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.LabelCount, input_name='LabelCount'), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.RetailerName is not None: + namespaceprefix_ = self.RetailerName_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailerName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerName), input_name='RetailerName')), namespaceprefix_ , eol_)) + if self.RetailerAddress is not None: + namespaceprefix_ = self.RetailerAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailerAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerAddress), input_name='RetailerAddress')), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitNumber, input_name='PermitNumber'), namespaceprefix_ , eol_)) + if self.PermitIssuingPOCity is not None: + namespaceprefix_ = self.PermitIssuingPOCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOCity), input_name='PermitIssuingPOCity')), namespaceprefix_ , eol_)) + if self.PermitIssuingPOState is not None: + namespaceprefix_ = self.PermitIssuingPOState_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOState), input_name='PermitIssuingPOState')), namespaceprefix_ , eol_)) + if self.PermitIssuingPOZip5 is not None: + namespaceprefix_ = self.PermitIssuingPOZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitIssuingPOZip5, input_name='PermitIssuingPOZip5'), namespaceprefix_ , eol_)) + if self.PDUFirmName is not None: + namespaceprefix_ = self.PDUFirmName_nsprefix_ + ':' if (UseCapturedNS_ and 
self.PDUFirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUFirmName), input_name='PDUFirmName')), namespaceprefix_ , eol_)) + if self.PDUPOBox is not None: + namespaceprefix_ = self.PDUPOBox_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUPOBox_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUPOBox>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUPOBox), input_name='PDUPOBox')), namespaceprefix_ , eol_)) + if self.PDUCity is not None: + namespaceprefix_ = self.PDUCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUCity), input_name='PDUCity')), namespaceprefix_ , eol_)) + if self.PDUState is not None: + namespaceprefix_ = self.PDUState_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUState), input_name='PDUState')), namespaceprefix_ , eol_)) + if self.PDUZip5 is not None: + namespaceprefix_ = self.PDUZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip5, input_name='PDUZip5'), namespaceprefix_ , eol_)) + if self.PDUZip4 is not None: + namespaceprefix_ = self.PDUZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUZip4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip4, input_name='PDUZip4'), namespaceprefix_ , eol_)) + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.DeliveryConfirmation is not None: + namespaceprefix_ = self.DeliveryConfirmation_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryConfirmation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryConfirmation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryConfirmation), input_name='DeliveryConfirmation')), namespaceprefix_ , eol_)) + if self.InsuranceValue is not None: + namespaceprefix_ = self.InsuranceValue_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuranceValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuranceValue), input_name='InsuranceValue')), namespaceprefix_ , eol_)) + if self.MailingAckPackageID is not None: + namespaceprefix_ = self.MailingAckPackageID_nsprefix_ + ':' if (UseCapturedNS_ and self.MailingAckPackageID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailingAckPackageID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailingAckPackageID), input_name='MailingAckPackageID')), 
namespaceprefix_ , eol_)) + if self.WeightInPounds is not None: + namespaceprefix_ = self.WeightInPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInPounds, input_name='WeightInPounds'), namespaceprefix_ , eol_)) + if self.WeightInOunces is not None: + namespaceprefix_ = self.WeightInOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInOunces>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInOunces, input_name='WeightInOunces'), namespaceprefix_ , eol_)) + if self.RMA is not None: + namespaceprefix_ = self.RMA_nsprefix_ + ':' if (UseCapturedNS_ and self.RMA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMA>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMA), input_name='RMA')), namespaceprefix_ , eol_)) + if self.RMAPICFlag is not None: + namespaceprefix_ = self.RMAPICFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.RMAPICFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMAPICFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMAPICFlag), input_name='RMAPICFlag')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.RMABarcode is not None: + namespaceprefix_ = self.RMABarcode_nsprefix_ + ':' if (UseCapturedNS_ and self.RMABarcode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMABarcode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMABarcode), input_name='RMABarcode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelCount' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'LabelCount') + ival_ = self.gds_validate_integer(ival_, node, 'LabelCount') + self.LabelCount = ival_ + 
self.LabelCount_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'RetailerName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerName') + value_ = self.gds_validate_string(value_, node, 'RetailerName') + self.RetailerName = value_ + self.RetailerName_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailerAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerAddress') + value_ = self.gds_validate_string(value_, node, 'RetailerAddress') + self.RetailerAddress = value_ + self.RetailerAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitNumber') + ival_ = self.gds_validate_integer(ival_, node, 'PermitNumber') + self.PermitNumber = ival_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOCity') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOCity') + self.PermitIssuingPOCity = value_ + self.PermitIssuingPOCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOState') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOState') + self.PermitIssuingPOState = value_ + self.PermitIssuingPOState_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitIssuingPOZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PermitIssuingPOZip5') + self.PermitIssuingPOZip5 = ival_ + self.PermitIssuingPOZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUFirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUFirmName') + value_ = self.gds_validate_string(value_, node, 'PDUFirmName') + self.PDUFirmName = value_ + self.PDUFirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUPOBox': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUPOBox') + value_ = self.gds_validate_string(value_, node, 'PDUPOBox') + self.PDUPOBox = value_ + self.PDUPOBox_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUCity') + value_ = self.gds_validate_string(value_, node, 'PDUCity') + self.PDUCity = value_ + self.PDUCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUState') + value_ = self.gds_validate_string(value_, node, 'PDUState') + self.PDUState = value_ + self.PDUState_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip5') + self.PDUZip5 = ival_ + self.PDUZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip4') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip4') + self.PDUZip4 = ival_ + self.PDUZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceType': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryConfirmation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryConfirmation') + value_ = self.gds_validate_string(value_, node, 'DeliveryConfirmation') + self.DeliveryConfirmation = value_ + self.DeliveryConfirmation_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuranceValue') + value_ = self.gds_validate_string(value_, node, 'InsuranceValue') + self.InsuranceValue = value_ + self.InsuranceValue_nsprefix_ = child_.prefix + elif nodeName_ == 'MailingAckPackageID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailingAckPackageID') + value_ = self.gds_validate_string(value_, node, 'MailingAckPackageID') + self.MailingAckPackageID = value_ + self.MailingAckPackageID_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInPounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInPounds') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInPounds') + self.WeightInPounds = ival_ + self.WeightInPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInOunces' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInOunces') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInOunces') + self.WeightInOunces = ival_ + self.WeightInOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'RMA': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMA') + value_ = self.gds_validate_string(value_, node, 'RMA') + self.RMA = value_ + self.RMA_nsprefix_ = child_.prefix + elif nodeName_ == 'RMAPICFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMAPICFlag') + value_ = self.gds_validate_string(value_, node, 'RMAPICFlag') + self.RMAPICFlag = value_ + self.RMAPICFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'RMABarcode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMABarcode') + value_ = self.gds_validate_string(value_, node, 'RMABarcode') + self.RMABarcode = value_ + self.RMABarcode_nsprefix_ = child_.prefix +# end class EMRSV4_0BulkRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ImageParameter is None: + self.ImageParameter = [] + else: + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = 
staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def add_ImageParameter(self, value): + self.ImageParameter.append(value) + def insert_ImageParameter_at(self, index, value): + self.ImageParameter.insert(index, value) + def replace_ImageParameter_at(self, index, value): + self.ImageParameter[index] = value + def has__content(self): + if ( + self.ImageParameter + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ImageParameter_ in self.ImageParameter: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ImageParameter_), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter.append(value_) + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +# +# End data representation classes. 
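+#
+# Illustrative usage sketch (not part of the generateDS output; the values
+# below are placeholders, not real Web Tools credentials or validated USPS
+# data): build a minimal EMRSV4_0BulkRequest and serialize it with export().
+#
+#   import sys
+#   request = EMRSV4_0BulkRequest(
+#       USERID="XXXXXXXX",        # placeholder user id (XML attribute)
+#       Option="1",
+#       LabelCount=1,
+#       ServiceType="Priority",
+#       WeightInPounds=2,
+#       WeightInOunces=8,
+#   )
+#   request.export(sys.stdout, 0, name_="EMRSV4.0BulkRequest")
+#
+# parseString() further below performs the inverse, rebuilding the object
+# tree from an XML string.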
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. 
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'EMRSV4_0BulkRequest' + rootClass = EMRSV4_0BulkRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from emrsv4_0_bulk_request import *\n\n') + sys.stdout.write('import emrsv4_0_bulk_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "EMRSV4_0BulkRequest", + "ImageParametersType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/error.py b/modules/connectors/usps_international/karrio/schemas/usps_international/error.py new file mode 100644 index 0000000000..9bd68ff392 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/error.py @@ -0,0 +1,1379 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:04 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/error.py') +# +# Command line arguments: +# ./schemas/Error.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/error.py" ./schemas/Error.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class Error(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Number=None, Source=None, Description=None, HelpFile=None, HelpContext=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Number = Number + self.Number_nsprefix_ = None + self.Source = Source + self.Source_nsprefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.HelpFile = HelpFile + self.HelpFile_nsprefix_ = None + self.HelpContext = HelpContext + self.HelpContext_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, Error) + if subclass is not None: + return subclass(*args_, **kwargs_) + if Error.subclass: + return Error.subclass(*args_, **kwargs_) + else: + return Error(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Number(self): + return self.Number + def set_Number(self, Number): + self.Number = Number + def get_Source(self): + return self.Source + def set_Source(self, Source): + self.Source = Source + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_HelpFile(self): + return self.HelpFile + def set_HelpFile(self, HelpFile): + self.HelpFile = HelpFile + def get_HelpContext(self): + return self.HelpContext + def set_HelpContext(self, HelpContext): + self.HelpContext = HelpContext + def has__content(self): + if ( + self.Number is not None or + self.Source is not None or + self.Description is not None or + self.HelpFile is not None or + self.HelpContext is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Error', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('Error') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'Error': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = 
set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Error') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Error', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Error'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Error', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Number is not None: + namespaceprefix_ = self.Number_nsprefix_ + ':' if (UseCapturedNS_ and self.Number_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Number), input_name='Number')), namespaceprefix_ , eol_)) + if self.Source is not None: + namespaceprefix_ = self.Source_nsprefix_ + ':' if (UseCapturedNS_ and self.Source_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSource>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Source), input_name='Source')), namespaceprefix_ , eol_)) + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.HelpFile is not None: + namespaceprefix_ = self.HelpFile_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpFile_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpFile>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HelpFile), input_name='HelpFile')), namespaceprefix_ , eol_)) + if self.HelpContext is not None: + namespaceprefix_ = self.HelpContext_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpContext_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpContext>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HelpContext), input_name='HelpContext')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Number': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Number') + value_ = self.gds_validate_string(value_, node, 'Number') + self.Number = value_ + self.Number_nsprefix_ = child_.prefix + elif nodeName_ == 'Source': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Source') + value_ = self.gds_validate_string(value_, node, 'Source') + self.Source = value_ + 
self.Source_nsprefix_ = child_.prefix + elif nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpFile': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HelpFile') + value_ = self.gds_validate_string(value_, node, 'HelpFile') + self.HelpFile = value_ + self.HelpFile_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpContext': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HelpContext') + value_ = self.gds_validate_string(value_, node, 'HelpContext') + self.HelpContext = value_ + self.HelpContext_nsprefix_ = child_.prefix +# end class Error + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'Error' + rootClass = Error + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from error import *\n\n') + sys.stdout.write('import error as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "Error" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_request.py new file mode 100644 index 0000000000..8d9158b412 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_request.py @@ -0,0 +1,1335 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:10 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_cancel_request.py') +# +# Command line arguments: +# ./schemas/eVSCancelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_cancel_request.py" ./schemas/eVSCancelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSCancelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, BarcodeNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSCancelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSCancelRequest.subclass: + return eVSCancelRequest.subclass(*args_, **kwargs_) + else: + return eVSCancelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.BarcodeNumber is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSCancelRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSCancelRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSCancelRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSCancelRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % 
(namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSCancelRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.BarcodeNumber is not None:
+            namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sBarcodeNumber>%s</%sBarcodeNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        value = find_attr_value_('USERID', node)
+        if value is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            self.USERID = value
+        value = find_attr_value_('PASSWORD', node)
+        if value is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            self.PASSWORD = value
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'BarcodeNumber':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'BarcodeNumber')
+            value_ = self.gds_validate_string(value_, node, 'BarcodeNumber')
+            self.BarcodeNumber = value_
+            self.BarcodeNumber_nsprefix_ = child_.prefix
+# end class eVSCancelRequest
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelRequest' + rootClass = eVSCancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_cancel_request import *\n\n') + sys.stdout.write('import evs_cancel_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSCancelRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_response.py new file mode 100644 index 0000000000..59ea913966 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_cancel_response.py @@ -0,0 +1,1345 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:10 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_cancel_response.py') +# +# Command line arguments: +# ./schemas/eVSCancelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_cancel_response.py" ./schemas/eVSCancelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSCancelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, BarcodeNumber=None, Status=None, Reason=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + self.Reason = Reason + self.Reason_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSCancelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSCancelResponse.subclass: + return eVSCancelResponse.subclass(*args_, **kwargs_) + else: + return eVSCancelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def get_Reason(self): + return self.Reason + def set_Reason(self, Reason): + self.Reason = Reason + def has__content(self): + if ( + self.BarcodeNumber is not None or + self.Status is not None or + self.Reason is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSCancelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSCancelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSCancelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSCancelResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSCancelResponse'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSCancelResponse', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.BarcodeNumber is not None:
+            namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sBarcodeNumber>%s</%sBarcodeNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_))
+        if self.Status is not None:
+            namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sStatus>%s</%sStatus>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_))
+        if self.Reason is not None:
+            namespaceprefix_ = self.Reason_nsprefix_ + ':' if (UseCapturedNS_ and self.Reason_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sReason>%s</%sReason>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Reason), input_name='Reason')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'BarcodeNumber':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'BarcodeNumber')
+            value_ = self.gds_validate_string(value_, node, 'BarcodeNumber')
+            self.BarcodeNumber = value_
+            self.BarcodeNumber_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Status':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Status')
+            value_ = self.gds_validate_string(value_, node, 'Status')
+            self.Status = value_
+            self.Status_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Reason':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Reason')
+            value_ = self.gds_validate_string(value_, node, 'Reason')
+            self.Reason = value_
+            self.Reason_nsprefix_ = child_.prefix
+# end class eVSCancelResponse
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSCancelResponse' + rootClass = eVSCancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_cancel_response import *\n\n') + sys.stdout.write('import evs_cancel_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSCancelResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_request.py new file mode 100644 index 0000000000..3238e41efd --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_request.py @@ -0,0 +1,3343 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:11 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_express_mail_intl_request.py') +# +# Command line arguments: +# ./schemas/eVSExpressMailIntlRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_express_mail_intl_request.py" ./schemas/eVSExpressMailIntlRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSExpressMailIntlRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, FromCustomsReference=None, ToName=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToCity=None, ToProvince=None, ToCountry=None, ToPostalCode=None, ToPOBoxFlag=None, ToPhone=None, ToFax=None, ToEmail=None, ImportersReferenceNumber=None, NonDeliveryOption=None, RedirectName=None, RedirectEmail=None, RedirectSMS=None, RedirectAddress=None, RedirectCity=None, RedirectState=None, RedirectZipCode=None, RedirectZip4=None, Container=None, ShippingContents=None, InsuredNumber=None, InsuredAmount=None, Postage=None, GrossPounds=None, GrossOunces=None, ContentType=None, ContentTypeOther=None, Agreement=None, Comments=None, LicenseNumber=None, CertificateNumber=None, InvoiceNumber=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, POZipCode=None, LabelDate=None, EMCAAccount=None, HoldForManifest=None, EELPFC=None, PriceOptions=None, Length=None, Width=None, Height=None, Girth=None, LabelTime=None, MeterPaymentFlag=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, ImportersReferenceType=None, ImportersTelephoneNumber=None, ImportersFaxNumber=None, ImportersEmail=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, SenderEmail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + self.FromMiddleInitial 
= FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.FromCustomsReference = FromCustomsReference + self.FromCustomsReference_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToCountry = ToCountry + self.ToCountry_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPOBoxFlag = ToPOBoxFlag + self.ToPOBoxFlag_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.ToFax = ToFax + self.ToFax_nsprefix_ = None + self.ToEmail = ToEmail + self.ToEmail_nsprefix_ = None + self.ImportersReferenceNumber = ImportersReferenceNumber + self.ImportersReferenceNumber_nsprefix_ = None + self.NonDeliveryOption = NonDeliveryOption + self.NonDeliveryOption_nsprefix_ = None + self.RedirectName = RedirectName + self.RedirectName_nsprefix_ = None + self.RedirectEmail = RedirectEmail + self.RedirectEmail_nsprefix_ = None + self.RedirectSMS = RedirectSMS + self.RedirectSMS_nsprefix_ = None + self.RedirectAddress = RedirectAddress + self.RedirectAddress_nsprefix_ = None + self.RedirectCity = RedirectCity + self.RedirectCity_nsprefix_ = None + self.RedirectState = RedirectState + self.RedirectState_nsprefix_ = None + self.RedirectZipCode = RedirectZipCode + self.RedirectZipCode_nsprefix_ = None + self.RedirectZip4 = RedirectZip4 + self.RedirectZip4_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.InsuredNumber = InsuredNumber + self.InsuredNumber_nsprefix_ = None + self.InsuredAmount = InsuredAmount + self.InsuredAmount_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ContentTypeOther = ContentTypeOther + self.ContentTypeOther_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Comments = Comments + self.Comments_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + 
self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.LabelDate = LabelDate + self.LabelDate_nsprefix_ = None + self.EMCAAccount = EMCAAccount + self.EMCAAccount_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.EELPFC = EELPFC + self.EELPFC_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.LabelTime = LabelTime + self.LabelTime_nsprefix_ = None + self.MeterPaymentFlag = MeterPaymentFlag + self.MeterPaymentFlag_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.ImportersReferenceType = ImportersReferenceType + self.ImportersReferenceType_nsprefix_ = None + self.ImportersTelephoneNumber = ImportersTelephoneNumber + self.ImportersTelephoneNumber_nsprefix_ = None + self.ImportersFaxNumber = ImportersFaxNumber + self.ImportersFaxNumber_nsprefix_ = None + self.ImportersEmail = ImportersEmail + self.ImportersEmail_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.SenderEmail = SenderEmail + self.SenderEmail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSExpressMailIntlRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSExpressMailIntlRequest.subclass: + return eVSExpressMailIntlRequest.subclass(*args_, **kwargs_) + else: + return eVSExpressMailIntlRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ 
= ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_FromCustomsReference(self): + return self.FromCustomsReference + def set_FromCustomsReference(self, FromCustomsReference): + self.FromCustomsReference = FromCustomsReference + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToCountry(self): + return self.ToCountry + def set_ToCountry(self, ToCountry): + self.ToCountry = ToCountry + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPOBoxFlag(self): + return self.ToPOBoxFlag + def set_ToPOBoxFlag(self, ToPOBoxFlag): + self.ToPOBoxFlag = 
ToPOBoxFlag + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_ToFax(self): + return self.ToFax + def set_ToFax(self, ToFax): + self.ToFax = ToFax + def get_ToEmail(self): + return self.ToEmail + def set_ToEmail(self, ToEmail): + self.ToEmail = ToEmail + def get_ImportersReferenceNumber(self): + return self.ImportersReferenceNumber + def set_ImportersReferenceNumber(self, ImportersReferenceNumber): + self.ImportersReferenceNumber = ImportersReferenceNumber + def get_NonDeliveryOption(self): + return self.NonDeliveryOption + def set_NonDeliveryOption(self, NonDeliveryOption): + self.NonDeliveryOption = NonDeliveryOption + def get_RedirectName(self): + return self.RedirectName + def set_RedirectName(self, RedirectName): + self.RedirectName = RedirectName + def get_RedirectEmail(self): + return self.RedirectEmail + def set_RedirectEmail(self, RedirectEmail): + self.RedirectEmail = RedirectEmail + def get_RedirectSMS(self): + return self.RedirectSMS + def set_RedirectSMS(self, RedirectSMS): + self.RedirectSMS = RedirectSMS + def get_RedirectAddress(self): + return self.RedirectAddress + def set_RedirectAddress(self, RedirectAddress): + self.RedirectAddress = RedirectAddress + def get_RedirectCity(self): + return self.RedirectCity + def set_RedirectCity(self, RedirectCity): + self.RedirectCity = RedirectCity + def get_RedirectState(self): + return self.RedirectState + def set_RedirectState(self, RedirectState): + self.RedirectState = RedirectState + def get_RedirectZipCode(self): + return self.RedirectZipCode + def set_RedirectZipCode(self, RedirectZipCode): + self.RedirectZipCode = RedirectZipCode + def get_RedirectZip4(self): + return self.RedirectZip4 + def set_RedirectZip4(self, RedirectZip4): + self.RedirectZip4 = RedirectZip4 + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_InsuredNumber(self): + return self.InsuredNumber + def set_InsuredNumber(self, InsuredNumber): + self.InsuredNumber = InsuredNumber + def get_InsuredAmount(self): + return self.InsuredAmount + def set_InsuredAmount(self, InsuredAmount): + self.InsuredAmount = InsuredAmount + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentTypeOther(self): + return self.ContentTypeOther + def set_ContentTypeOther(self, ContentTypeOther): + self.ContentTypeOther = ContentTypeOther + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + self.Agreement = Agreement + def get_Comments(self): + return self.Comments + def set_Comments(self, Comments): + self.Comments = Comments + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def set_CertificateNumber(self, CertificateNumber): + 
self.CertificateNumber = CertificateNumber + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_LabelDate(self): + return self.LabelDate + def set_LabelDate(self, LabelDate): + self.LabelDate = LabelDate + def get_EMCAAccount(self): + return self.EMCAAccount + def set_EMCAAccount(self, EMCAAccount): + self.EMCAAccount = EMCAAccount + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_EELPFC(self): + return self.EELPFC + def set_EELPFC(self, EELPFC): + self.EELPFC = EELPFC + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_LabelTime(self): + return self.LabelTime + def set_LabelTime(self, LabelTime): + self.LabelTime = LabelTime + def get_MeterPaymentFlag(self): + return self.MeterPaymentFlag + def set_MeterPaymentFlag(self, MeterPaymentFlag): + self.MeterPaymentFlag = MeterPaymentFlag + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_ImportersReferenceType(self): + return self.ImportersReferenceType + def set_ImportersReferenceType(self, ImportersReferenceType): + self.ImportersReferenceType = ImportersReferenceType + def get_ImportersTelephoneNumber(self): + return self.ImportersTelephoneNumber + def set_ImportersTelephoneNumber(self, ImportersTelephoneNumber): + self.ImportersTelephoneNumber = ImportersTelephoneNumber + def get_ImportersFaxNumber(self): + return self.ImportersFaxNumber + def set_ImportersFaxNumber(self, ImportersFaxNumber): + self.ImportersFaxNumber = ImportersFaxNumber + def get_ImportersEmail(self): + return self.ImportersEmail + def set_ImportersEmail(self, ImportersEmail): + self.ImportersEmail = ImportersEmail + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def 
get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_SenderEmail(self): + return self.SenderEmail + def set_SenderEmail(self, SenderEmail): + self.SenderEmail = SenderEmail + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.FromCustomsReference is not None or + self.ToName is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToCity is not None or + self.ToProvince is not None or + self.ToCountry is not None or + self.ToPostalCode is not None or + self.ToPOBoxFlag is not None or + self.ToPhone is 
not None or + self.ToFax is not None or + self.ToEmail is not None or + self.ImportersReferenceNumber is not None or + self.NonDeliveryOption is not None or + self.RedirectName is not None or + self.RedirectEmail is not None or + self.RedirectSMS is not None or + self.RedirectAddress is not None or + self.RedirectCity is not None or + self.RedirectState is not None or + self.RedirectZipCode is not None or + self.RedirectZip4 is not None or + self.Container is not None or + self.ShippingContents is not None or + self.InsuredNumber is not None or + self.InsuredAmount is not None or + self.Postage is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.ContentType is not None or + self.ContentTypeOther is not None or + self.Agreement is not None or + self.Comments is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.InvoiceNumber is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.POZipCode is not None or + self.LabelDate is not None or + self.EMCAAccount is not None or + self.HoldForManifest is not None or + self.EELPFC is not None or + self.PriceOptions is not None or + self.Length is not None or + self.Width is not None or + self.Height is not None or + self.Girth is not None or + self.LabelTime is not None or + self.MeterPaymentFlag is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.ImportersReferenceType is not None or + self.ImportersTelephoneNumber is not None or + self.ImportersFaxNumber is not None or + self.ImportersEmail is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.SenderEmail is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSExpressMailIntlRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSExpressMailIntlRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSExpressMailIntlRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSExpressMailIntlRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSExpressMailIntlRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromFirstName is not None: + namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_)) + if self.FromMiddleInitial is not None: + namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromMiddleInitial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_)) + if self.FromLastName is not None: + namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if self.FromCustomsReference is not None: + namespaceprefix_ = self.FromCustomsReference_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCustomsReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCustomsReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCustomsReference), input_name='FromCustomsReference')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirstName is not 
None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToCountry is not None: + namespaceprefix_ = self.ToCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCountry), input_name='ToCountry')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), 
input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPOBoxFlag is not None: + namespaceprefix_ = self.ToPOBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPOBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPOBoxFlag), input_name='ToPOBoxFlag')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.ToFax is not None: + namespaceprefix_ = self.ToFax_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFax_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFax>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFax), input_name='ToFax')), namespaceprefix_ , eol_)) + if self.ToEmail is not None: + namespaceprefix_ = self.ToEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToEmail), input_name='ToEmail')), namespaceprefix_ , eol_)) + if self.ImportersReferenceNumber is not None: + namespaceprefix_ = self.ImportersReferenceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceNumber), input_name='ImportersReferenceNumber')), namespaceprefix_ , eol_)) + if self.NonDeliveryOption is not None: + namespaceprefix_ = self.NonDeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonDeliveryOption), input_name='NonDeliveryOption')), namespaceprefix_ , eol_)) + if self.RedirectName is not None: + namespaceprefix_ = self.RedirectName_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectName), input_name='RedirectName')), namespaceprefix_ , eol_)) + if self.RedirectEmail is not None: + namespaceprefix_ = self.RedirectEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectEmail), input_name='RedirectEmail')), namespaceprefix_ , eol_)) + if self.RedirectSMS is not None: + namespaceprefix_ = self.RedirectSMS_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectSMS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectSMS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectSMS), input_name='RedirectSMS')), namespaceprefix_ , eol_)) + if self.RedirectAddress is not None: + namespaceprefix_ = self.RedirectAddress_nsprefix_ + 
':' if (UseCapturedNS_ and self.RedirectAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectAddress), input_name='RedirectAddress')), namespaceprefix_ , eol_)) + if self.RedirectCity is not None: + namespaceprefix_ = self.RedirectCity_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectCity), input_name='RedirectCity')), namespaceprefix_ , eol_)) + if self.RedirectState is not None: + namespaceprefix_ = self.RedirectState_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectState), input_name='RedirectState')), namespaceprefix_ , eol_)) + if self.RedirectZipCode is not None: + namespaceprefix_ = self.RedirectZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZipCode), input_name='RedirectZipCode')), namespaceprefix_ , eol_)) + if self.RedirectZip4 is not None: + namespaceprefix_ = self.RedirectZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZip4), input_name='RedirectZip4')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.InsuredNumber is not None: + namespaceprefix_ = self.InsuredNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredNumber), input_name='InsuredNumber')), namespaceprefix_ , eol_)) + if self.InsuredAmount is not None: + namespaceprefix_ = self.InsuredAmount_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredAmount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredAmount>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.InsuredAmount, input_name='InsuredAmount'), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Postage), 
input_name='Postage')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentTypeOther is not None: + namespaceprefix_ = self.ContentTypeOther_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentTypeOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentTypeOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentTypeOther), input_name='ContentTypeOther')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Comments is not None: + namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.LabelDate is not None: + namespaceprefix_ = self.LabelDate_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelDate), input_name='LabelDate')), namespaceprefix_ , eol_)) + if self.EMCAAccount is not None: + namespaceprefix_ = self.EMCAAccount_nsprefix_ + ':' if (UseCapturedNS_ and self.EMCAAccount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEMCAAccount>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EMCAAccount), input_name='EMCAAccount')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.EELPFC is not None: + namespaceprefix_ = self.EELPFC_nsprefix_ + ':' if (UseCapturedNS_ and self.EELPFC_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEELPFC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EELPFC), input_name='EELPFC')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), 
namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.LabelTime is not None: + namespaceprefix_ = self.LabelTime_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelTime), input_name='LabelTime')), namespaceprefix_ , eol_)) + if self.MeterPaymentFlag is not None: + namespaceprefix_ = self.MeterPaymentFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.MeterPaymentFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMeterPaymentFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MeterPaymentFlag), input_name='MeterPaymentFlag')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.ImportersReferenceType is 
not None: + namespaceprefix_ = self.ImportersReferenceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceType), input_name='ImportersReferenceType')), namespaceprefix_ , eol_)) + if self.ImportersTelephoneNumber is not None: + namespaceprefix_ = self.ImportersTelephoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersTelephoneNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersTelephoneNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersTelephoneNumber), input_name='ImportersTelephoneNumber')), namespaceprefix_ , eol_)) + if self.ImportersFaxNumber is not None: + namespaceprefix_ = self.ImportersFaxNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersFaxNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersFaxNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersFaxNumber), input_name='ImportersFaxNumber')), namespaceprefix_ , eol_)) + if self.ImportersEmail is not None: + namespaceprefix_ = self.ImportersEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersEmail), input_name='ImportersEmail')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is 
not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if self.SenderFirstName is not None: + namespaceprefix_ = self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), 
input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.SenderEmail is not None: + namespaceprefix_ = self.SenderEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEmail), input_name='SenderEmail')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName 
= value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCustomsReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCustomsReference') + value_ = self.gds_validate_string(value_, node, 'FromCustomsReference') + self.FromCustomsReference = value_ + self.FromCustomsReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + 
self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCountry') + value_ = self.gds_validate_string(value_, node, 'ToCountry') + self.ToCountry = value_ + self.ToCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPOBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPOBoxFlag') + value_ = self.gds_validate_string(value_, node, 'ToPOBoxFlag') + self.ToPOBoxFlag = value_ + self.ToPOBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFax': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFax') + value_ = self.gds_validate_string(value_, node, 'ToFax') + self.ToFax = value_ + self.ToFax_nsprefix_ = child_.prefix + elif nodeName_ == 'ToEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToEmail') + value_ = self.gds_validate_string(value_, node, 'ToEmail') + self.ToEmail = value_ + self.ToEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReferenceNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceNumber') + self.ImportersReferenceNumber = value_ + self.ImportersReferenceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonDeliveryOption') + value_ = self.gds_validate_string(value_, node, 'NonDeliveryOption') + self.NonDeliveryOption = value_ + self.NonDeliveryOption_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectName') + value_ = 
self.gds_validate_string(value_, node, 'RedirectName') + self.RedirectName = value_ + self.RedirectName_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectEmail') + value_ = self.gds_validate_string(value_, node, 'RedirectEmail') + self.RedirectEmail = value_ + self.RedirectEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectSMS': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectSMS') + value_ = self.gds_validate_string(value_, node, 'RedirectSMS') + self.RedirectSMS = value_ + self.RedirectSMS_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectAddress') + value_ = self.gds_validate_string(value_, node, 'RedirectAddress') + self.RedirectAddress = value_ + self.RedirectAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectCity') + value_ = self.gds_validate_string(value_, node, 'RedirectCity') + self.RedirectCity = value_ + self.RedirectCity_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectState') + value_ = self.gds_validate_string(value_, node, 'RedirectState') + self.RedirectState = value_ + self.RedirectState_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZipCode') + value_ = self.gds_validate_string(value_, node, 'RedirectZipCode') + self.RedirectZipCode = value_ + self.RedirectZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZip4') + value_ = self.gds_validate_string(value_, node, 'RedirectZip4') + self.RedirectZip4 = value_ + self.RedirectZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'InsuredNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredNumber') + value_ = self.gds_validate_string(value_, node, 'InsuredNumber') + self.InsuredNumber = value_ + self.InsuredNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredAmount' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'InsuredAmount') + fval_ = self.gds_validate_decimal(fval_, node, 'InsuredAmount') + self.InsuredAmount = fval_ + self.InsuredAmount_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Postage') + value_ = self.gds_validate_string(value_, node, 'Postage') + self.Postage = value_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'GrossPounds') + fval_ = self.gds_validate_float(fval_, node, 'GrossPounds') + self.GrossPounds = fval_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 
'GrossOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'GrossOunces') + fval_ = self.gds_validate_float(fval_, node, 'GrossOunces') + self.GrossOunces = fval_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentTypeOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentTypeOther') + value_ = self.gds_validate_string(value_, node, 'ContentTypeOther') + self.ContentTypeOther = value_ + self.ContentTypeOther_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Comments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Comments') + value_ = self.gds_validate_string(value_, node, 'Comments') + self.Comments = value_ + self.Comments_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelDate') + value_ = 
self.gds_validate_string(value_, node, 'LabelDate') + self.LabelDate = value_ + self.LabelDate_nsprefix_ = child_.prefix + elif nodeName_ == 'EMCAAccount': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EMCAAccount') + value_ = self.gds_validate_string(value_, node, 'EMCAAccount') + self.EMCAAccount = value_ + self.EMCAAccount_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'EELPFC': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EELPFC') + value_ = self.gds_validate_string(value_, node, 'EELPFC') + self.EELPFC = value_ + self.EELPFC_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Length') + fval_ = self.gds_validate_float(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Width') + fval_ = self.gds_validate_float(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Height') + fval_ = self.gds_validate_float(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Girth') + fval_ = self.gds_validate_float(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelTime') + value_ = self.gds_validate_string(value_, node, 'LabelTime') + self.LabelTime = value_ + self.LabelTime_nsprefix_ = child_.prefix + elif nodeName_ == 'MeterPaymentFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MeterPaymentFlag') + value_ = self.gds_validate_string(value_, node, 'MeterPaymentFlag') + self.MeterPaymentFlag = value_ + self.MeterPaymentFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = 
self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReferenceType') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceType') + self.ImportersReferenceType = value_ + self.ImportersReferenceType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersTelephoneNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersTelephoneNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersTelephoneNumber') + self.ImportersTelephoneNumber = value_ + self.ImportersTelephoneNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersFaxNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersFaxNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersFaxNumber') + self.ImportersFaxNumber = value_ + self.ImportersFaxNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersEmail') + value_ = self.gds_validate_string(value_, node, 'ImportersEmail') + self.ImportersEmail = value_ + self.ImportersEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + 
elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEmail') + value_ = self.gds_validate_string(value_, node, 'SenderEmail') + self.SenderEmail = value_ + self.SenderEmail_nsprefix_ = child_.prefix +# end class eVSExpressMailIntlRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + 
        self.ImageParameter = ImageParameter
+    def has__content(self):
+        if (
+            self.ImageParameter is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'ImageParametersType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.ImageParameter is not None:
+            namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sImageParameter>%s</%sImageParameter>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ImageParameter':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ImageParameter')
+            value_ = self.gds_validate_string(value_, node, 'ImageParameter')
+            self.ImageParameter = value_
+            self.ImageParameter_nsprefix_ = child_.prefix
+# end class ImageParametersType
+
+
+class ShippingContentsType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        if ItemDetail is None:
+            self.ItemDetail = []
+        else:
+            self.ItemDetail = ItemDetail
+        self.ItemDetail_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, ShippingContentsType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if ShippingContentsType.subclass:
+            return ShippingContentsType.subclass(*args_, **kwargs_)
+        else:
+            return ShippingContentsType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_ItemDetail(self):
+        return self.ItemDetail
+    def set_ItemDetail(self, ItemDetail):
+        self.ItemDetail = ItemDetail
+    def add_ItemDetail(self, value):
+        self.ItemDetail.append(value)
+    def insert_ItemDetail_at(self, index, value):
+        self.ItemDetail.insert(index, value)
+    def replace_ItemDetail_at(self, index, value):
+        self.ItemDetail[index] = value
+    def has__content(self):
+        if (
+            self.ItemDetail
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'ShippingContentsType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        for ItemDetail_ in self.ItemDetail:
+            namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else ''
+            ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print)
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ItemDetail':
+            obj_ = ItemDetailType.factory(parent_object_=self)
+            obj_.build(child_, gds_collector_=gds_collector_)
+            self.ItemDetail.append(obj_)
+            obj_.original_tagname_ = 'ItemDetail'
+# end class ShippingContentsType
+
+
+class ItemDetailType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def
__init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, 
eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + 
value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Value') + fval_ = self.gds_validate_float(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetPounds') + fval_ = self.gds_validate_float(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetOunces') + fval_ = self.gds_validate_float(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlRequest' + rootClass = eVSExpressMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_express_mail_intl_request import *\n\n') + sys.stdout.write('import evs_express_mail_intl_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ImageParametersType", + "ItemDetailType", + "ShippingContentsType", + "eVSExpressMailIntlRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_response.py new file mode 100644 index 0000000000..819ecc9cd1 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_express_mail_intl_response.py @@ -0,0 +1,1600 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:11 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_express_mail_intl_response.py') +# +# Command line arguments: +# ./schemas/eVSExpressMailIntlResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_express_mail_intl_response.py" ./schemas/eVSExpressMailIntlResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSExpressMailIntlResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, TotalValue=None, SDRValue=None, BarcodeNumber=None, LabelImage=None, Page2Image=None, Page3Image=None, Page4Image=None, Page5Image=None, Page6Image=None, Prohibitions=None, Restrictions=None, Observations=None, Regulations=None, AdditionalRestrictions=None, InsuranceFee=None, GuaranteeAvailability=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.TotalValue = TotalValue + self.TotalValue_nsprefix_ = None + self.SDRValue = SDRValue + self.SDRValue_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.Page2Image = Page2Image + self.Page2Image_nsprefix_ = None + self.Page3Image = Page3Image + self.Page3Image_nsprefix_ = None + self.Page4Image = Page4Image + self.Page4Image_nsprefix_ = None + self.Page5Image = Page5Image + self.Page5Image_nsprefix_ = None + self.Page6Image = Page6Image + self.Page6Image_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.Regulations = Regulations + self.Regulations_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.InsuranceFee = InsuranceFee + self.InsuranceFee_nsprefix_ = None + self.GuaranteeAvailability = GuaranteeAvailability + self.GuaranteeAvailability_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSExpressMailIntlResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSExpressMailIntlResponse.subclass: + return eVSExpressMailIntlResponse.subclass(*args_, **kwargs_) + else: + return eVSExpressMailIntlResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def set_Postage(self, 
Postage): + self.Postage = Postage + def get_TotalValue(self): + return self.TotalValue + def set_TotalValue(self, TotalValue): + self.TotalValue = TotalValue + def get_SDRValue(self): + return self.SDRValue + def set_SDRValue(self, SDRValue): + self.SDRValue = SDRValue + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_Page2Image(self): + return self.Page2Image + def set_Page2Image(self, Page2Image): + self.Page2Image = Page2Image + def get_Page3Image(self): + return self.Page3Image + def set_Page3Image(self, Page3Image): + self.Page3Image = Page3Image + def get_Page4Image(self): + return self.Page4Image + def set_Page4Image(self, Page4Image): + self.Page4Image = Page4Image + def get_Page5Image(self): + return self.Page5Image + def set_Page5Image(self, Page5Image): + self.Page5Image = Page5Image + def get_Page6Image(self): + return self.Page6Image + def set_Page6Image(self, Page6Image): + self.Page6Image = Page6Image + def get_Prohibitions(self): + return self.Prohibitions + def set_Prohibitions(self, Prohibitions): + self.Prohibitions = Prohibitions + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def get_Observations(self): + return self.Observations + def set_Observations(self, Observations): + self.Observations = Observations + def get_Regulations(self): + return self.Regulations + def set_Regulations(self, Regulations): + self.Regulations = Regulations + def get_AdditionalRestrictions(self): + return self.AdditionalRestrictions + def set_AdditionalRestrictions(self, AdditionalRestrictions): + self.AdditionalRestrictions = AdditionalRestrictions + def get_InsuranceFee(self): + return self.InsuranceFee + def set_InsuranceFee(self, InsuranceFee): + self.InsuranceFee = InsuranceFee + def get_GuaranteeAvailability(self): + return self.GuaranteeAvailability + def set_GuaranteeAvailability(self, GuaranteeAvailability): + self.GuaranteeAvailability = GuaranteeAvailability + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def has__content(self): + if ( + self.Postage is not None or + self.TotalValue is not None or + self.SDRValue is not None or + self.BarcodeNumber is not None or + self.LabelImage is not None or + self.Page2Image is not None or + self.Page3Image is not None or + self.Page4Image is not None or + self.Page5Image is not None or + self.Page6Image is not None or + self.Prohibitions is not None or + self.Restrictions is not None or + self.Observations is not None or + self.Regulations is not None or + self.AdditionalRestrictions is not None or + self.InsuranceFee is not None or + self.GuaranteeAvailability is not None or + self.RemainingBarcodes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSExpressMailIntlResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSExpressMailIntlResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ 
and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSExpressMailIntlResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSExpressMailIntlResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSExpressMailIntlResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSExpressMailIntlResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.TotalValue is not None: + namespaceprefix_ = self.TotalValue_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotalValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.TotalValue, input_name='TotalValue'), namespaceprefix_ , eol_)) + if self.SDRValue is not None: + namespaceprefix_ = self.SDRValue_nsprefix_ + ':' if (UseCapturedNS_ and self.SDRValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSDRValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.SDRValue, input_name='SDRValue'), namespaceprefix_ , eol_)) + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.Page2Image is not None: + namespaceprefix_ = self.Page2Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page2Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage2Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page2Image), input_name='Page2Image')), namespaceprefix_ , eol_)) + if self.Page3Image is not None: + namespaceprefix_ = self.Page3Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page3Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage3Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page3Image), input_name='Page3Image')), namespaceprefix_ , eol_)) + if self.Page4Image is not None: + 
namespaceprefix_ = self.Page4Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page4Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage4Image>%s</%sPage4Image>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page4Image), input_name='Page4Image')), namespaceprefix_ , eol_)) + if self.Page5Image is not None: + namespaceprefix_ = self.Page5Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page5Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage5Image>%s</%sPage5Image>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page5Image), input_name='Page5Image')), namespaceprefix_ , eol_)) + if self.Page6Image is not None: + namespaceprefix_ = self.Page6Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page6Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage6Image>%s</%sPage6Image>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page6Image), input_name='Page6Image')), namespaceprefix_ , eol_)) + if self.Prohibitions is not None: + namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProhibitions>%s</%sProhibitions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_)) + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s</%sRestrictions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + if self.Observations is not None: + namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sObservations>%s</%sObservations>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_)) + if self.Regulations is not None: + namespaceprefix_ = self.Regulations_nsprefix_ + ':' if (UseCapturedNS_ and self.Regulations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRegulations>%s</%sRegulations>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Regulations), input_name='Regulations')), namespaceprefix_ , eol_)) + if self.AdditionalRestrictions is not None: + namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalRestrictions>%s</%sAdditionalRestrictions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_)) + if self.InsuranceFee is not None: + namespaceprefix_ = self.InsuranceFee_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceFee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuranceFee>%s</%sInsuranceFee>%s' % (namespaceprefix_ , self.gds_format_float(self.InsuranceFee, input_name='InsuranceFee'), namespaceprefix_ , eol_)) + if self.GuaranteeAvailability is not None: + namespaceprefix_ = self.GuaranteeAvailability_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteeAvailability_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteeAvailability>%s</%sGuaranteeAvailability>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteeAvailability), input_name='GuaranteeAvailability')), namespaceprefix_ , eol_)) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemainingBarcodes>%s</%sRemainingBarcodes>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RemainingBarcodes), input_name='RemainingBarcodes')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'TotalValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'TotalValue') + fval_ = self.gds_validate_float(fval_, node, 'TotalValue') + self.TotalValue = fval_ + self.TotalValue_nsprefix_ = child_.prefix + elif nodeName_ == 'SDRValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'SDRValue') + fval_ = self.gds_validate_float(fval_, node, 'SDRValue') + self.SDRValue = fval_ + self.SDRValue_nsprefix_ = child_.prefix + elif nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'Page2Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page2Image') + value_ = self.gds_validate_string(value_, node, 'Page2Image') + self.Page2Image = value_ + self.Page2Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page3Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page3Image') + value_ = self.gds_validate_string(value_, node, 'Page3Image') + self.Page3Image = value_ + self.Page3Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page4Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page4Image') + value_ = self.gds_validate_string(value_, node, 'Page4Image') + self.Page4Image = value_ + self.Page4Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page5Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page5Image') + value_ = self.gds_validate_string(value_, node, 'Page5Image') + self.Page5Image = value_ + self.Page5Image_nsprefix_ = child_.prefix + 
elif nodeName_ == 'Page6Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page6Image') + value_ = self.gds_validate_string(value_, node, 'Page6Image') + self.Page6Image = value_ + self.Page6Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Prohibitions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Prohibitions') + value_ = self.gds_validate_string(value_, node, 'Prohibitions') + self.Prohibitions = value_ + self.Prohibitions_nsprefix_ = child_.prefix + elif nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'Regulations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Regulations') + value_ = self.gds_validate_string(value_, node, 'Regulations') + self.Regulations = value_ + self.Regulations_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceFee' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'InsuranceFee') + fval_ = self.gds_validate_float(fval_, node, 'InsuranceFee') + self.InsuranceFee = fval_ + self.InsuranceFee_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteeAvailability': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteeAvailability') + value_ = self.gds_validate_string(value_, node, 'GuaranteeAvailability') + self.GuaranteeAvailability = value_ + self.GuaranteeAvailability_nsprefix_ = child_.prefix + elif nodeName_ == 'RemainingBarcodes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RemainingBarcodes') + value_ = self.gds_validate_string(value_, node, 'RemainingBarcodes') + self.RemainingBarcodes = value_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSExpressMailIntlResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSExpressMailIntlResponse' + rootClass = eVSExpressMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_express_mail_intl_response import *\n\n') + sys.stdout.write('import evs_express_mail_intl_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSExpressMailIntlResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_request.py new file mode 100644 index 0000000000..08f4fef4f7 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_request.py @@ -0,0 +1,3174 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:11 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_first_class_mail_intl_request.py') +# +# Command line arguments: +# ./schemas/eVSFirstClassMailIntlRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_first_class_mail_intl_request.py" ./schemas/eVSFirstClassMailIntlRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSFirstClassMailIntlRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, ToName=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToCity=None, ToProvince=None, ToCountry=None, ToPostalCode=None, ToPOBoxFlag=None, ToPhone=None, ToFax=None, ToEmail=None, FirstClassMailType=None, ShippingContents=None, Postage=None, GrossPounds=None, GrossOunces=None, ContentType=None, ContentTypeOther=None, Agreement=None, Comments=None, LicenseNumber=None, CertificateNumber=None, InvoiceNumber=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, POZipCode=None, LabelDate=None, HoldForManifest=None, EELPFC=None, Container=None, Length=None, Width=None, Height=None, Girth=None, ExtraServices=None, PriceOptions=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, SenderEmail=None, RemainingBarcodes=None, ChargebackCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + self.FromMiddleInitial = FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + 
self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToCountry = ToCountry + self.ToCountry_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPOBoxFlag = ToPOBoxFlag + self.ToPOBoxFlag_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.ToFax = ToFax + self.ToFax_nsprefix_ = None + self.ToEmail = ToEmail + self.ToEmail_nsprefix_ = None + self.FirstClassMailType = FirstClassMailType + self.FirstClassMailType_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ContentTypeOther = ContentTypeOther + self.ContentTypeOther_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Comments = Comments + self.Comments_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.LabelDate = LabelDate + self.LabelDate_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.EELPFC = EELPFC + self.EELPFC_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = 
DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.SenderEmail = SenderEmail + self.SenderEmail_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSFirstClassMailIntlRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSFirstClassMailIntlRequest.subclass: + return eVSFirstClassMailIntlRequest.subclass(*args_, **kwargs_) + else: + return eVSFirstClassMailIntlRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + 
return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToCountry(self): + return self.ToCountry + def set_ToCountry(self, ToCountry): + self.ToCountry = ToCountry + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPOBoxFlag(self): + return self.ToPOBoxFlag + def set_ToPOBoxFlag(self, ToPOBoxFlag): + self.ToPOBoxFlag = ToPOBoxFlag + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_ToFax(self): + return self.ToFax + def set_ToFax(self, ToFax): + self.ToFax = ToFax + def get_ToEmail(self): + return self.ToEmail + def set_ToEmail(self, ToEmail): + self.ToEmail = ToEmail + def get_FirstClassMailType(self): + return self.FirstClassMailType + def set_FirstClassMailType(self, FirstClassMailType): + self.FirstClassMailType = FirstClassMailType + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentTypeOther(self): + return self.ContentTypeOther + def set_ContentTypeOther(self, ContentTypeOther): + self.ContentTypeOther = ContentTypeOther + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + self.Agreement = Agreement + def get_Comments(self): + return self.Comments + def set_Comments(self, Comments): + self.Comments = Comments + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def set_CertificateNumber(self, CertificateNumber): + 
self.CertificateNumber = CertificateNumber + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_LabelDate(self): + return self.LabelDate + def set_LabelDate(self, LabelDate): + self.LabelDate = LabelDate + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_EELPFC(self): + return self.EELPFC + def set_EELPFC(self, EELPFC): + self.EELPFC = EELPFC + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = 
VendorProductVersionNumber + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_SenderEmail(self): + return self.SenderEmail + def set_SenderEmail(self, SenderEmail): + self.SenderEmail = SenderEmail + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.ToName is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToCity is not None or + self.ToProvince is not None or + self.ToCountry is not None or + self.ToPostalCode is not None or + self.ToPOBoxFlag is not None or + self.ToPhone is not None or + self.ToFax is not None or + self.ToEmail is not None or + self.FirstClassMailType is not None or + self.ShippingContents is not None or + self.Postage is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.ContentType is not None or + self.ContentTypeOther is not None or + self.Agreement is not None or + self.Comments is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.InvoiceNumber is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None 
or + self.CustomerRefNo2 is not None or + self.POZipCode is not None or + self.LabelDate is not None or + self.HoldForManifest is not None or + self.EELPFC is not None or + self.Container is not None or + self.Length is not None or + self.Width is not None or + self.Height is not None or + self.Girth is not None or + self.ExtraServices is not None or + self.PriceOptions is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.SenderEmail is not None or + self.RemainingBarcodes is not None or + self.ChargebackCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSFirstClassMailIntlRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSFirstClassMailIntlRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSFirstClassMailIntlRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSFirstClassMailIntlRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSFirstClassMailIntlRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), 
input_name='Option')), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Revision, input_name='Revision'), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromFirstName is not None: + namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_)) + if self.FromMiddleInitial is not None: + namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromMiddleInitial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_)) + if self.FromLastName is not None: + namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirstName is not None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToCountry is not None: + namespaceprefix_ = self.ToCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCountry), input_name='ToCountry')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPOBoxFlag is not None: + namespaceprefix_ = self.ToPOBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPOBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPOBoxFlag), input_name='ToPOBoxFlag')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.ToFax is not None: + namespaceprefix_ = self.ToFax_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFax_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFax>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFax), input_name='ToFax')), namespaceprefix_ , eol_)) + if self.ToEmail is not None: + namespaceprefix_ = self.ToEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToEmail), input_name='ToEmail')), namespaceprefix_ , eol_)) + if self.FirstClassMailType is not None: + namespaceprefix_ = self.FirstClassMailType_nsprefix_ + ':' if 
(UseCapturedNS_ and self.FirstClassMailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstClassMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstClassMailType), input_name='FirstClassMailType')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Postage), input_name='Postage')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentTypeOther is not None: + namespaceprefix_ = self.ContentTypeOther_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentTypeOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentTypeOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentTypeOther), input_name='ContentTypeOther')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Comments is not None: + namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber 
is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.LabelDate is not None: + namespaceprefix_ = self.LabelDate_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelDate), input_name='LabelDate')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.EELPFC is not None: + namespaceprefix_ = self.EELPFC_nsprefix_ + ':' if (UseCapturedNS_ and self.EELPFC_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sEELPFC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EELPFC), input_name='EELPFC')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , 
eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if 
self.SenderFirstName is not None: + namespaceprefix_ = self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.SenderEmail is not None: + namespaceprefix_ = self.SenderEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEmail), input_name='SenderEmail')), namespaceprefix_ , eol_)) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if 
(UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RemainingBarcodes), input_name='RemainingBarcodes')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Revision') + ival_ = self.gds_validate_integer(ival_, node, 'Revision') + self.Revision = ival_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName = value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = 
self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 
'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCountry') + value_ = self.gds_validate_string(value_, node, 'ToCountry') + self.ToCountry = value_ + self.ToCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPOBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPOBoxFlag') + value_ = self.gds_validate_string(value_, node, 'ToPOBoxFlag') + self.ToPOBoxFlag = value_ + self.ToPOBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFax': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFax') + value_ = self.gds_validate_string(value_, node, 'ToFax') + self.ToFax = value_ + self.ToFax_nsprefix_ = child_.prefix + elif nodeName_ == 'ToEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToEmail') + value_ = self.gds_validate_string(value_, node, 'ToEmail') + self.ToEmail = value_ + self.ToEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstClassMailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstClassMailType') + value_ = self.gds_validate_string(value_, node, 'FirstClassMailType') + self.FirstClassMailType = value_ + self.FirstClassMailType_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'Postage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Postage') + value_ = self.gds_validate_string(value_, node, 'Postage') + self.Postage = value_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossPounds') + self.GrossPounds = fval_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossOunces') + self.GrossOunces = fval_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = 
value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentTypeOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentTypeOther') + value_ = self.gds_validate_string(value_, node, 'ContentTypeOther') + self.ContentTypeOther = value_ + self.ContentTypeOther_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Comments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Comments') + value_ = self.gds_validate_string(value_, node, 'Comments') + self.Comments = value_ + self.Comments_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelDate') + value_ = self.gds_validate_string(value_, node, 'LabelDate') + self.LabelDate = value_ + self.LabelDate_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'EELPFC': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'EELPFC') + value_ = self.gds_validate_string(value_, node, 'EELPFC') + self.EELPFC = value_ + self.EELPFC_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEmail') + value_ = self.gds_validate_string(value_, node, 'SenderEmail') + self.SenderEmail = value_ + self.SenderEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'RemainingBarcodes': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RemainingBarcodes') + value_ = self.gds_validate_string(value_, node, 'RemainingBarcodes') + self.RemainingBarcodes = value_ + self.RemainingBarcodes_nsprefix_ = child_.prefix + elif nodeName_ == 'ChargebackCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ChargebackCode') + value_ = self.gds_validate_string(value_, node, 'ChargebackCode') + self.ChargebackCode = value_ + self.ChargebackCode_nsprefix_ = child_.prefix +# end class eVSFirstClassMailIntlRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def has__content(self): + if ( + self.ImageParameter is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, 
Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not 
None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Value') + fval_ = self.gds_validate_decimal(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + 
self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % 
(namespaceprefix_ , self.gds_format_integer(ExtraService_, input_name='ExtraService'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ExtraService') + ival_ = self.gds_validate_integer(ival_, node, 'ExtraService') + self.ExtraService.append(ival_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = 
rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlRequest' + rootClass = eVSFirstClassMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_first_class_mail_intl_request import *\n\n') + sys.stdout.write('import evs_first_class_mail_intl_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServicesType", + "ImageParametersType", + "ItemDetailType", + "ShippingContentsType", + "eVSFirstClassMailIntlRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_response.py new file mode 100644 index 0000000000..6e23227178 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_first_class_mail_intl_response.py @@ -0,0 +1,1746 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:11 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_first_class_mail_intl_response.py') +# +# Command line arguments: +# ./schemas/eVSFirstClassMailIntlResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_first_class_mail_intl_response.py" ./schemas/eVSFirstClassMailIntlResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. 
+# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSFirstClassMailIntlResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, TotalValue=None, BarcodeNumber=None, LabelImage=None, Page2Image=None, Page3Image=None, Prohibitions=None, Restrictions=None, Observations=None, Regulations=None, AdditionalRestrictions=None, ExtraServices=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.TotalValue = TotalValue + self.TotalValue_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.Page2Image = Page2Image + self.Page2Image_nsprefix_ = None + self.Page3Image = Page3Image + self.Page3Image_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.Regulations = Regulations + self.Regulations_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSFirstClassMailIntlResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSFirstClassMailIntlResponse.subclass: + return eVSFirstClassMailIntlResponse.subclass(*args_, **kwargs_) + else: + return eVSFirstClassMailIntlResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_TotalValue(self): + return self.TotalValue + def set_TotalValue(self, TotalValue): + self.TotalValue = TotalValue + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def 
get_Page2Image(self):
+        return self.Page2Image
+    def set_Page2Image(self, Page2Image):
+        self.Page2Image = Page2Image
+    def get_Page3Image(self):
+        return self.Page3Image
+    def set_Page3Image(self, Page3Image):
+        self.Page3Image = Page3Image
+    def get_Prohibitions(self):
+        return self.Prohibitions
+    def set_Prohibitions(self, Prohibitions):
+        self.Prohibitions = Prohibitions
+    def get_Restrictions(self):
+        return self.Restrictions
+    def set_Restrictions(self, Restrictions):
+        self.Restrictions = Restrictions
+    def get_Observations(self):
+        return self.Observations
+    def set_Observations(self, Observations):
+        self.Observations = Observations
+    def get_Regulations(self):
+        return self.Regulations
+    def set_Regulations(self, Regulations):
+        self.Regulations = Regulations
+    def get_AdditionalRestrictions(self):
+        return self.AdditionalRestrictions
+    def set_AdditionalRestrictions(self, AdditionalRestrictions):
+        self.AdditionalRestrictions = AdditionalRestrictions
+    def get_ExtraServices(self):
+        return self.ExtraServices
+    def set_ExtraServices(self, ExtraServices):
+        self.ExtraServices = ExtraServices
+    def get_RemainingBarcodes(self):
+        return self.RemainingBarcodes
+    def set_RemainingBarcodes(self, RemainingBarcodes):
+        self.RemainingBarcodes = RemainingBarcodes
+    def has__content(self):
+        if (
+            self.Postage is not None or
+            self.TotalValue is not None or
+            self.BarcodeNumber is not None or
+            self.LabelImage is not None or
+            self.Page2Image is not None or
+            self.Page3Image is not None or
+            self.Prohibitions is not None or
+            self.Restrictions is not None or
+            self.Observations is not None or
+            self.Regulations is not None or
+            self.AdditionalRestrictions is not None or
+            self.ExtraServices is not None or
+            self.RemainingBarcodes is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlResponse', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSFirstClassMailIntlResponse')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'eVSFirstClassMailIntlResponse':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSFirstClassMailIntlResponse')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSFirstClassMailIntlResponse', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSFirstClassMailIntlResponse'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSFirstClassMailIntlResponse', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Postage is not None:
+            namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPostage>%s</%sPostage>%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_))
+        if self.TotalValue is not None:
+            namespaceprefix_ = self.TotalValue_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalValue_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sTotalValue>%s</%sTotalValue>%s' % (namespaceprefix_ , self.gds_format_float(self.TotalValue, input_name='TotalValue'), namespaceprefix_ , eol_))
+        if self.BarcodeNumber is not None:
+            namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sBarcodeNumber>%s</%sBarcodeNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_))
+        if self.LabelImage is not None:
+            namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sLabelImage>%s</%sLabelImage>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_))
+        if self.Page2Image is not None:
+            namespaceprefix_ = self.Page2Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page2Image_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPage2Image>%s</%sPage2Image>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page2Image), input_name='Page2Image')), namespaceprefix_ , eol_))
+        if self.Page3Image is not None:
+            namespaceprefix_ = self.Page3Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page3Image_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPage3Image>%s</%sPage3Image>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page3Image), input_name='Page3Image')), namespaceprefix_ , eol_))
+        if self.Prohibitions is not None:
+            namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sProhibitions>%s</%sProhibitions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_))
+        if self.Restrictions is not None:
+            namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRestrictions>%s</%sRestrictions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_))
+        if self.Observations is not None:
+            namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sObservations>%s</%sObservations>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_))
+        if self.Regulations is not None:
+            namespaceprefix_ = self.Regulations_nsprefix_ + ':' if (UseCapturedNS_ and self.Regulations_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRegulations>%s</%sRegulations>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Regulations), input_name='Regulations')), namespaceprefix_ , eol_))
+        if self.AdditionalRestrictions is not None:
+            namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAdditionalRestrictions>%s</%sAdditionalRestrictions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_))
+        if self.ExtraServices is not None:
+            namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else ''
+            self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print)
+        if self.RemainingBarcodes is not None:
+            namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRemainingBarcodes>%s</%sRemainingBarcodes>%s' % (namespaceprefix_ , self.gds_format_integer(self.RemainingBarcodes, input_name='RemainingBarcodes'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'Postage' and child_.text:
+            sval_ = child_.text
+            fval_ = self.gds_parse_float(sval_, node, 'Postage')
+            fval_ = self.gds_validate_float(fval_, node, 'Postage')
+            self.Postage = fval_
+            self.Postage_nsprefix_ = child_.prefix
+        elif nodeName_ == 'TotalValue' and child_.text:
+            sval_ = child_.text
+            fval_ = self.gds_parse_float(sval_, node, 'TotalValue')
+            fval_ = self.gds_validate_float(fval_, node, 'TotalValue')
+            self.TotalValue = fval_
+            self.TotalValue_nsprefix_ = child_.prefix
+        elif nodeName_ == 'BarcodeNumber':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'BarcodeNumber')
+            value_ = self.gds_validate_string(value_, node, 'BarcodeNumber')
+            self.BarcodeNumber = value_
+            self.BarcodeNumber_nsprefix_ = child_.prefix
+        elif nodeName_ == 'LabelImage':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'LabelImage')
+            value_ = self.gds_validate_string(value_, node, 'LabelImage')
+            self.LabelImage = value_
+            self.LabelImage_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Page2Image':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Page2Image')
+            value_ = self.gds_validate_string(value_, node, 'Page2Image')
+            self.Page2Image = value_
+            self.Page2Image_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Page3Image':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Page3Image')
+            value_ = self.gds_validate_string(value_, node, 'Page3Image')
+            self.Page3Image = value_
+            self.Page3Image_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Prohibitions':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Prohibitions')
+            value_ = self.gds_validate_string(value_, node, 'Prohibitions')
+            self.Prohibitions = value_
+            self.Prohibitions_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Restrictions':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Restrictions')
+            value_ = self.gds_validate_string(value_,
node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'Regulations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Regulations') + value_ = self.gds_validate_string(value_, node, 'Regulations') + self.Regulations = value_ + self.Regulations_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'RemainingBarcodes' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'RemainingBarcodes') + ival_ = self.gds_validate_integer(ival_, node, 'RemainingBarcodes') + self.RemainingBarcodes = ival_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSFirstClassMailIntlResponse + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + 
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        for ExtraService_ in self.ExtraService:
+            namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else ''
+            ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print)
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ExtraService':
+            obj_ = ExtraServiceType.factory(parent_object_=self)
+            obj_.build(child_, gds_collector_=gds_collector_)
+            self.ExtraService.append(obj_)
+            obj_.original_tagname_ = 'ExtraService'
+# end class ExtraServicesType
+
+
+class ExtraServiceType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, ServiceID=None, ServiceName=None, Price=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        self.ServiceID = ServiceID
+        self.ServiceID_nsprefix_ = None
+        self.ServiceName = ServiceName
+        self.ServiceName_nsprefix_ = None
+        self.Price = Price
+        self.Price_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, ExtraServiceType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if ExtraServiceType.subclass:
+            return ExtraServiceType.subclass(*args_, **kwargs_)
+        else:
+            return ExtraServiceType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_ServiceID(self):
+        return self.ServiceID
+    def set_ServiceID(self, ServiceID):
+        self.ServiceID = ServiceID
+    def get_ServiceName(self):
+        return self.ServiceName
+    def set_ServiceName(self, ServiceName):
+        self.ServiceName = ServiceName
+    def get_Price(self):
+        return self.Price
+    def set_Price(self, Price):
+        self.Price = Price
+    def has__content(self):
+        if (
+            self.ServiceID is not None or
+            self.ServiceName is not None or
+            self.Price is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'ExtraServiceType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.ServiceID is not None:
+            namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sServiceID>%s</%sServiceID>%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_))
+        if self.ServiceName is not None:
+            namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sServiceName>%s</%sServiceName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_))
+        if self.Price is not None:
+            namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPrice>%s</%sPrice>%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ServiceID' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'ServiceID')
+            ival_ = self.gds_validate_integer(ival_, node, 'ServiceID')
+            self.ServiceID = ival_
+            self.ServiceID_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ServiceName':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ServiceName')
+            value_ = self.gds_validate_string(value_,
node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSFirstClassMailIntlResponse' + rootClass = eVSFirstClassMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_first_class_mail_intl_response import *\n\n') + sys.stdout.write('import evs_first_class_mail_intl_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServiceType", + "ExtraServicesType", + "eVSFirstClassMailIntlResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_request.py new file mode 100644 index 0000000000..4a39b87f71 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_request.py @@ -0,0 +1,3352 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:11 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_gxg_get_label_request.py') +# +# Command line arguments: +# ./schemas/eVSGXGGetLabelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_gxg_get_label_request.py" ./schemas/eVSGXGGetLabelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+              self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+              self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSGXGGetLabelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZIP5=None, FromZIP4=None, FromPhone=None, ShipFromZIP=None, SenderEMail=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToPostalCode=None, ToPhone=None, RecipientEMail=None, ToDPID=None, ToProvince=None, ToTaxID=None, Container=None, ContentType=None, ShippingContents=None, PurposeOfShipment=None, PartiesToTransaction=None, Agreement=None, Postage=None, InsuredValue=None, GrossPounds=None, GrossOunces=None, Length=None, Width=None, Height=None, Girth=None, Shape=None, CIRequired=None, InvoiceDate=None, InvoiceNumber=None, CustomerOrderNumber=None, CustOrderNumber=None, TermsDelivery=None, TermsDeliveryOther=None, PackingCost=None, CountryUltDest=None, CIAgreement=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, ShipDate=None, HoldForManifest=None, PriceOptions=None, CommercialShipment=None, BuyerFirstName=None, BuyerLastName=None, BuyerAddress1=None, BuyerAddress2=None, BuyerAddress3=None, BuyerCity=None, BuyerState=None, BuyerPostalCode=None, BuyerCountry=None, BuyerTaxID=None, BuyerRecipient=None, TermsPayment=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, OverrideMID=None, ChargebackCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + self.FromMiddleInitial = FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + 
self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZIP5 = FromZIP5 + self.FromZIP5_nsprefix_ = None + self.FromZIP4 = FromZIP4 + self.FromZIP4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.ShipFromZIP = ShipFromZIP + self.ShipFromZIP_nsprefix_ = None + self.SenderEMail = SenderEMail + self.SenderEMail_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.RecipientEMail = RecipientEMail + self.RecipientEMail_nsprefix_ = None + self.ToDPID = ToDPID + self.ToDPID_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToTaxID = ToTaxID + self.ToTaxID_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.PurposeOfShipment = PurposeOfShipment + self.PurposeOfShipment_nsprefix_ = None + self.PartiesToTransaction = PartiesToTransaction + self.PartiesToTransaction_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.InsuredValue = InsuredValue + self.InsuredValue_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Shape = Shape + self.Shape_nsprefix_ = None + self.CIRequired = CIRequired + self.CIRequired_nsprefix_ = None + self.InvoiceDate = InvoiceDate + self.InvoiceDate_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + self.CustomerOrderNumber = CustomerOrderNumber + self.CustomerOrderNumber_nsprefix_ = None + self.CustOrderNumber = CustOrderNumber + self.CustOrderNumber_nsprefix_ = None + self.TermsDelivery = TermsDelivery + self.TermsDelivery_nsprefix_ = None + self.TermsDeliveryOther = TermsDeliveryOther + self.TermsDeliveryOther_nsprefix_ = None + self.PackingCost = PackingCost + self.PackingCost_nsprefix_ = None + self.CountryUltDest = CountryUltDest + self.CountryUltDest_nsprefix_ = None + self.CIAgreement = CIAgreement + self.CIAgreement_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.ShipDate = ShipDate + self.ShipDate_nsprefix_ = None + 
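For orientation only, a minimal sketch of how a caller might drive this generated class; the field values below are made up, and in practice the karrio USPS provider code builds these requests from its own models before serializing them:

    import sys
    # Hypothetical values; only a few of the many keyword arguments are shown.
    request = eVSGXGGetLabelRequest(
        USERID='XXXX',        # serialized as an XML attribute
        PASSWORD='YYYY',      # serialized as an XML attribute
        FromFirstName='John',
        FromCity='WASHINGTON',
        ToFirstName='Jane',
        ToPostalCode='N1R 8B2',
        GrossPounds=2,
        GrossOunces=4,
    )
    request.export(sys.stdout, 0)  # writes the <eVSGXGGetLabelRequest> XML document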
self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.CommercialShipment = CommercialShipment + self.CommercialShipment_nsprefix_ = None + self.BuyerFirstName = BuyerFirstName + self.BuyerFirstName_nsprefix_ = None + self.BuyerLastName = BuyerLastName + self.BuyerLastName_nsprefix_ = None + self.BuyerAddress1 = BuyerAddress1 + self.BuyerAddress1_nsprefix_ = None + self.BuyerAddress2 = BuyerAddress2 + self.BuyerAddress2_nsprefix_ = None + self.BuyerAddress3 = BuyerAddress3 + self.BuyerAddress3_nsprefix_ = None + self.BuyerCity = BuyerCity + self.BuyerCity_nsprefix_ = None + self.BuyerState = BuyerState + self.BuyerState_nsprefix_ = None + self.BuyerPostalCode = BuyerPostalCode + self.BuyerPostalCode_nsprefix_ = None + self.BuyerCountry = BuyerCountry + self.BuyerCountry_nsprefix_ = None + self.BuyerTaxID = BuyerTaxID + self.BuyerTaxID_nsprefix_ = None + self.BuyerRecipient = BuyerRecipient + self.BuyerRecipient_nsprefix_ = None + self.TermsPayment = TermsPayment + self.TermsPayment_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.OverrideMID = OverrideMID + self.OverrideMID_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSGXGGetLabelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSGXGGetLabelRequest.subclass: + return eVSGXGGetLabelRequest.subclass(*args_, **kwargs_) + else: + return eVSGXGGetLabelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def 
get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZIP5(self): + return self.FromZIP5 + def set_FromZIP5(self, FromZIP5): + self.FromZIP5 = FromZIP5 + def get_FromZIP4(self): + return self.FromZIP4 + def set_FromZIP4(self, FromZIP4): + self.FromZIP4 = FromZIP4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_ShipFromZIP(self): + return self.ShipFromZIP + def set_ShipFromZIP(self, ShipFromZIP): + self.ShipFromZIP = ShipFromZIP + def get_SenderEMail(self): + return self.SenderEMail + def set_SenderEMail(self, SenderEMail): + self.SenderEMail = SenderEMail + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_RecipientEMail(self): + return self.RecipientEMail + def set_RecipientEMail(self, RecipientEMail): + self.RecipientEMail = RecipientEMail + def get_ToDPID(self): + return self.ToDPID + def set_ToDPID(self, ToDPID): + self.ToDPID = ToDPID + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToTaxID(self): + return self.ToTaxID + def set_ToTaxID(self, ToTaxID): + self.ToTaxID = ToTaxID + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_PurposeOfShipment(self): + return self.PurposeOfShipment + def set_PurposeOfShipment(self, PurposeOfShipment): + self.PurposeOfShipment = PurposeOfShipment + def get_PartiesToTransaction(self): + return self.PartiesToTransaction + def set_PartiesToTransaction(self, PartiesToTransaction): + self.PartiesToTransaction = PartiesToTransaction + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + 
self.Agreement = Agreement + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_InsuredValue(self): + return self.InsuredValue + def set_InsuredValue(self, InsuredValue): + self.InsuredValue = InsuredValue + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Shape(self): + return self.Shape + def set_Shape(self, Shape): + self.Shape = Shape + def get_CIRequired(self): + return self.CIRequired + def set_CIRequired(self, CIRequired): + self.CIRequired = CIRequired + def get_InvoiceDate(self): + return self.InvoiceDate + def set_InvoiceDate(self, InvoiceDate): + self.InvoiceDate = InvoiceDate + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_CustomerOrderNumber(self): + return self.CustomerOrderNumber + def set_CustomerOrderNumber(self, CustomerOrderNumber): + self.CustomerOrderNumber = CustomerOrderNumber + def get_CustOrderNumber(self): + return self.CustOrderNumber + def set_CustOrderNumber(self, CustOrderNumber): + self.CustOrderNumber = CustOrderNumber + def get_TermsDelivery(self): + return self.TermsDelivery + def set_TermsDelivery(self, TermsDelivery): + self.TermsDelivery = TermsDelivery + def get_TermsDeliveryOther(self): + return self.TermsDeliveryOther + def set_TermsDeliveryOther(self, TermsDeliveryOther): + self.TermsDeliveryOther = TermsDeliveryOther + def get_PackingCost(self): + return self.PackingCost + def set_PackingCost(self, PackingCost): + self.PackingCost = PackingCost + def get_CountryUltDest(self): + return self.CountryUltDest + def set_CountryUltDest(self, CountryUltDest): + self.CountryUltDest = CountryUltDest + def get_CIAgreement(self): + return self.CIAgreement + def set_CIAgreement(self, CIAgreement): + self.CIAgreement = CIAgreement + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_CommercialShipment(self): + return self.CommercialShipment + def set_CommercialShipment(self, CommercialShipment): + self.CommercialShipment = 
CommercialShipment + def get_BuyerFirstName(self): + return self.BuyerFirstName + def set_BuyerFirstName(self, BuyerFirstName): + self.BuyerFirstName = BuyerFirstName + def get_BuyerLastName(self): + return self.BuyerLastName + def set_BuyerLastName(self, BuyerLastName): + self.BuyerLastName = BuyerLastName + def get_BuyerAddress1(self): + return self.BuyerAddress1 + def set_BuyerAddress1(self, BuyerAddress1): + self.BuyerAddress1 = BuyerAddress1 + def get_BuyerAddress2(self): + return self.BuyerAddress2 + def set_BuyerAddress2(self, BuyerAddress2): + self.BuyerAddress2 = BuyerAddress2 + def get_BuyerAddress3(self): + return self.BuyerAddress3 + def set_BuyerAddress3(self, BuyerAddress3): + self.BuyerAddress3 = BuyerAddress3 + def get_BuyerCity(self): + return self.BuyerCity + def set_BuyerCity(self, BuyerCity): + self.BuyerCity = BuyerCity + def get_BuyerState(self): + return self.BuyerState + def set_BuyerState(self, BuyerState): + self.BuyerState = BuyerState + def get_BuyerPostalCode(self): + return self.BuyerPostalCode + def set_BuyerPostalCode(self, BuyerPostalCode): + self.BuyerPostalCode = BuyerPostalCode + def get_BuyerCountry(self): + return self.BuyerCountry + def set_BuyerCountry(self, BuyerCountry): + self.BuyerCountry = BuyerCountry + def get_BuyerTaxID(self): + return self.BuyerTaxID + def set_BuyerTaxID(self, BuyerTaxID): + self.BuyerTaxID = BuyerTaxID + def get_BuyerRecipient(self): + return self.BuyerRecipient + def set_BuyerRecipient(self, BuyerRecipient): + self.BuyerRecipient = BuyerRecipient + def get_TermsPayment(self): + return self.TermsPayment + def set_TermsPayment(self, TermsPayment): + self.TermsPayment = TermsPayment + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_OverrideMID(self): + return self.OverrideMID + def set_OverrideMID(self, OverrideMID): + self.OverrideMID = OverrideMID + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_USERID(self): + return 
self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZIP5 is not None or + self.FromZIP4 is not None or + self.FromPhone is not None or + self.ShipFromZIP is not None or + self.SenderEMail is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToPostalCode is not None or + self.ToPhone is not None or + self.RecipientEMail is not None or + self.ToDPID is not None or + self.ToProvince is not None or + self.ToTaxID is not None or + self.Container is not None or + self.ContentType is not None or + self.ShippingContents is not None or + self.PurposeOfShipment is not None or + self.PartiesToTransaction is not None or + self.Agreement is not None or + self.Postage is not None or + self.InsuredValue is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.Length is not None or + self.Width is not None or + self.Height is not None or + self.Girth is not None or + self.Shape is not None or + self.CIRequired is not None or + self.InvoiceDate is not None or + self.InvoiceNumber is not None or + self.CustomerOrderNumber is not None or + self.CustOrderNumber is not None or + self.TermsDelivery is not None or + self.TermsDeliveryOther is not None or + self.PackingCost is not None or + self.CountryUltDest is not None or + self.CIAgreement is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.ShipDate is not None or + self.HoldForManifest is not None or + self.PriceOptions is not None or + self.CommercialShipment is not None or + self.BuyerFirstName is not None or + self.BuyerLastName is not None or + self.BuyerAddress1 is not None or + self.BuyerAddress2 is not None or + self.BuyerAddress3 is not None or + self.BuyerCity is not None or + self.BuyerState is not None or + self.BuyerPostalCode is not None or + self.BuyerCountry is not None or + self.BuyerTaxID is not None or + self.BuyerRecipient is not None or + self.TermsPayment is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.OverrideMID is not None or + self.ChargebackCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSGXGGetLabelRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + 
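Taken together, export, _exportAttributes and _exportChildren below write the request roughly in this shape (abridged sketch; values are placeholders):

    # <eVSGXGGetLabelRequest USERID="XXXX" PASSWORD="YYYY">
    #     <Option>...</Option>
    #     <Revision>...</Revision>
    #     <FromFirstName>...</FromFirstName>
    #     ...
    #     <ChargebackCode>...</ChargebackCode>
    # </eVSGXGGetLabelRequest>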
else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSGXGGetLabelRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSGXGGetLabelRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSGXGGetLabelRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSGXGGetLabelRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromFirstName is not None: + namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_)) + if self.FromMiddleInitial is not None: + namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromMiddleInitial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_)) + if self.FromLastName is not None: + namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else 
'' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZIP5 is not None: + namespaceprefix_ = self.FromZIP5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZIP5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZIP5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZIP5), input_name='FromZIP5')), namespaceprefix_ , eol_)) + if self.FromZIP4 is not None: + namespaceprefix_ = self.FromZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZIP4), input_name='FromZIP4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if 
self.ShipFromZIP is not None: + namespaceprefix_ = self.ShipFromZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipFromZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipFromZIP>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ShipFromZIP), input_name='ShipFromZIP')), namespaceprefix_ , eol_)) + if self.SenderEMail is not None: + namespaceprefix_ = self.SenderEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEMail), input_name='SenderEMail')), namespaceprefix_ , eol_)) + if self.ToFirstName is not None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.RecipientEMail is not None: + namespaceprefix_ = self.RecipientEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.RecipientEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRecipientEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RecipientEMail), input_name='RecipientEMail')), namespaceprefix_ , eol_)) + if self.ToDPID is not None: + namespaceprefix_ = self.ToDPID_nsprefix_ + ':' if (UseCapturedNS_ and self.ToDPID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToDPID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToDPID), input_name='ToDPID')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToTaxID is not None: + namespaceprefix_ = self.ToTaxID_nsprefix_ + ':' if (UseCapturedNS_ and self.ToTaxID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToTaxID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToTaxID), input_name='ToTaxID')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.PurposeOfShipment is not None: + namespaceprefix_ = self.PurposeOfShipment_nsprefix_ + ':' if (UseCapturedNS_ and self.PurposeOfShipment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPurposeOfShipment>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PurposeOfShipment), input_name='PurposeOfShipment')), namespaceprefix_ , eol_)) + if self.PartiesToTransaction is not None: + namespaceprefix_ = self.PartiesToTransaction_nsprefix_ + ':' if (UseCapturedNS_ and self.PartiesToTransaction_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPartiesToTransaction>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PartiesToTransaction), input_name='PartiesToTransaction')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and 
self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Postage), input_name='Postage')), namespaceprefix_ , eol_)) + if self.InsuredValue is not None: + namespaceprefix_ = self.InsuredValue_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredValue), input_name='InsuredValue')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Shape is not None: + namespaceprefix_ = self.Shape_nsprefix_ + ':' if (UseCapturedNS_ and self.Shape_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShape>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Shape), input_name='Shape')), namespaceprefix_ , eol_)) + if self.CIRequired is not None: + namespaceprefix_ = self.CIRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.CIRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIRequired>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.CIRequired, 
input_name='CIRequired'), namespaceprefix_ , eol_)) + if self.InvoiceDate is not None: + namespaceprefix_ = self.InvoiceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceDate), input_name='InvoiceDate')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.CustomerOrderNumber is not None: + namespaceprefix_ = self.CustomerOrderNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerOrderNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerOrderNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerOrderNumber), input_name='CustomerOrderNumber')), namespaceprefix_ , eol_)) + if self.CustOrderNumber is not None: + namespaceprefix_ = self.CustOrderNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CustOrderNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustOrderNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustOrderNumber), input_name='CustOrderNumber')), namespaceprefix_ , eol_)) + if self.TermsDelivery is not None: + namespaceprefix_ = self.TermsDelivery_nsprefix_ + ':' if (UseCapturedNS_ and self.TermsDelivery_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTermsDelivery>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TermsDelivery), input_name='TermsDelivery')), namespaceprefix_ , eol_)) + if self.TermsDeliveryOther is not None: + namespaceprefix_ = self.TermsDeliveryOther_nsprefix_ + ':' if (UseCapturedNS_ and self.TermsDeliveryOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTermsDeliveryOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TermsDeliveryOther), input_name='TermsDeliveryOther')), namespaceprefix_ , eol_)) + if self.PackingCost is not None: + namespaceprefix_ = self.PackingCost_nsprefix_ + ':' if (UseCapturedNS_ and self.PackingCost_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPackingCost>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.PackingCost, input_name='PackingCost'), namespaceprefix_ , eol_)) + if self.CountryUltDest is not None: + namespaceprefix_ = self.CountryUltDest_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryUltDest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryUltDest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryUltDest), input_name='CountryUltDest')), namespaceprefix_ , eol_)) + if self.CIAgreement is not None: + namespaceprefix_ = self.CIAgreement_nsprefix_ + ':' if (UseCapturedNS_ and self.CIAgreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIAgreement), input_name='CIAgreement')), namespaceprefix_ , eol_)) + if 
self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.ShipDate is not None: + namespaceprefix_ = self.ShipDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ShipDate), input_name='ShipDate')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.CommercialShipment is not None: + namespaceprefix_ = self.CommercialShipment_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialShipment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialShipment>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.CommercialShipment, input_name='CommercialShipment'), namespaceprefix_ , eol_)) + if self.BuyerFirstName is not None: + namespaceprefix_ = self.BuyerFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerFirstName), input_name='BuyerFirstName')), namespaceprefix_ , eol_)) + if self.BuyerLastName is not None: + namespaceprefix_ = self.BuyerLastName_nsprefix_ + ':' if (UseCapturedNS_ and 
self.BuyerLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerLastName), input_name='BuyerLastName')), namespaceprefix_ , eol_)) + if self.BuyerAddress1 is not None: + namespaceprefix_ = self.BuyerAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerAddress1), input_name='BuyerAddress1')), namespaceprefix_ , eol_)) + if self.BuyerAddress2 is not None: + namespaceprefix_ = self.BuyerAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerAddress2), input_name='BuyerAddress2')), namespaceprefix_ , eol_)) + if self.BuyerAddress3 is not None: + namespaceprefix_ = self.BuyerAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerAddress3), input_name='BuyerAddress3')), namespaceprefix_ , eol_)) + if self.BuyerCity is not None: + namespaceprefix_ = self.BuyerCity_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerCity), input_name='BuyerCity')), namespaceprefix_ , eol_)) + if self.BuyerState is not None: + namespaceprefix_ = self.BuyerState_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerState), input_name='BuyerState')), namespaceprefix_ , eol_)) + if self.BuyerPostalCode is not None: + namespaceprefix_ = self.BuyerPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerPostalCode), input_name='BuyerPostalCode')), namespaceprefix_ , eol_)) + if self.BuyerCountry is not None: + namespaceprefix_ = self.BuyerCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerCountry), input_name='BuyerCountry')), namespaceprefix_ , eol_)) + if self.BuyerTaxID is not None: + namespaceprefix_ = self.BuyerTaxID_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerTaxID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerTaxID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerTaxID), input_name='BuyerTaxID')), namespaceprefix_ , eol_)) + if self.BuyerRecipient is not None: + namespaceprefix_ = self.BuyerRecipient_nsprefix_ + ':' if (UseCapturedNS_ and self.BuyerRecipient_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBuyerRecipient>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BuyerRecipient), input_name='BuyerRecipient')), namespaceprefix_ , eol_)) + if self.TermsPayment is not None: + namespaceprefix_ = self.TermsPayment_nsprefix_ + ':' if (UseCapturedNS_ and self.TermsPayment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTermsPayment>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TermsPayment), input_name='TermsPayment')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if 
self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.OverrideMID is not None: + namespaceprefix_ = self.OverrideMID_nsprefix_ + ':' if (UseCapturedNS_ and self.OverrideMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOverrideMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OverrideMID), input_name='OverrideMID')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif 
nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName = value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZIP5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZIP5') + value_ = self.gds_validate_string(value_, node, 'FromZIP5') + self.FromZIP5 = value_ + self.FromZIP5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZIP4') + value_ = self.gds_validate_string(value_, node, 'FromZIP4') + self.FromZIP4 = value_ + self.FromZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ShipFromZIP': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ShipFromZIP') + value_ = self.gds_validate_string(value_, node, 'ShipFromZIP') + self.ShipFromZIP = value_ + self.ShipFromZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEMail') + value_ = self.gds_validate_string(value_, node, 'SenderEMail') + self.SenderEMail = 
value_ + self.SenderEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'RecipientEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RecipientEMail') + value_ = self.gds_validate_string(value_, node, 'RecipientEMail') + self.RecipientEMail = value_ + self.RecipientEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'ToDPID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToDPID') + value_ = self.gds_validate_string(value_, node, 'ToDPID') + self.ToDPID = value_ + self.ToDPID_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToTaxID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToTaxID') + value_ = self.gds_validate_string(value_, node, 'ToTaxID') + self.ToTaxID = value_ + self.ToTaxID_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = 
child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'PurposeOfShipment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PurposeOfShipment') + value_ = self.gds_validate_string(value_, node, 'PurposeOfShipment') + self.PurposeOfShipment = value_ + self.PurposeOfShipment_nsprefix_ = child_.prefix + elif nodeName_ == 'PartiesToTransaction': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PartiesToTransaction') + value_ = self.gds_validate_string(value_, node, 'PartiesToTransaction') + self.PartiesToTransaction = value_ + self.PartiesToTransaction_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Postage') + value_ = self.gds_validate_string(value_, node, 'Postage') + self.Postage = value_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredValue') + value_ = self.gds_validate_string(value_, node, 'InsuredValue') + self.InsuredValue = value_ + self.InsuredValue_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossPounds') + self.GrossPounds = fval_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'GrossOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'GrossOunces') + self.GrossOunces = fval_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Shape': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Shape') + value_ = self.gds_validate_string(value_, node, 'Shape') + self.Shape = value_ + self.Shape_nsprefix_ = child_.prefix + elif nodeName_ == 'CIRequired': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'CIRequired') + ival_ = self.gds_validate_boolean(ival_, node, 'CIRequired') + self.CIRequired = ival_ + self.CIRequired_nsprefix_ = 
child_.prefix + elif nodeName_ == 'InvoiceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceDate') + value_ = self.gds_validate_string(value_, node, 'InvoiceDate') + self.InvoiceDate = value_ + self.InvoiceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerOrderNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerOrderNumber') + value_ = self.gds_validate_string(value_, node, 'CustomerOrderNumber') + self.CustomerOrderNumber = value_ + self.CustomerOrderNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CustOrderNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustOrderNumber') + value_ = self.gds_validate_string(value_, node, 'CustOrderNumber') + self.CustOrderNumber = value_ + self.CustOrderNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'TermsDelivery': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TermsDelivery') + value_ = self.gds_validate_string(value_, node, 'TermsDelivery') + self.TermsDelivery = value_ + self.TermsDelivery_nsprefix_ = child_.prefix + elif nodeName_ == 'TermsDeliveryOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TermsDeliveryOther') + value_ = self.gds_validate_string(value_, node, 'TermsDeliveryOther') + self.TermsDeliveryOther = value_ + self.TermsDeliveryOther_nsprefix_ = child_.prefix + elif nodeName_ == 'PackingCost' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'PackingCost') + fval_ = self.gds_validate_decimal(fval_, node, 'PackingCost') + self.PackingCost = fval_ + self.PackingCost_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryUltDest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryUltDest') + value_ = self.gds_validate_string(value_, node, 'CountryUltDest') + self.CountryUltDest = value_ + self.CountryUltDest_nsprefix_ = child_.prefix + elif nodeName_ == 'CIAgreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIAgreement') + value_ = self.gds_validate_string(value_, node, 'CIAgreement') + self.CIAgreement = value_ + self.CIAgreement_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 
'ShipDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ShipDate') + value_ = self.gds_validate_string(value_, node, 'ShipDate') + self.ShipDate = value_ + self.ShipDate_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialShipment': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'CommercialShipment') + ival_ = self.gds_validate_boolean(ival_, node, 'CommercialShipment') + self.CommercialShipment = ival_ + self.CommercialShipment_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerFirstName') + value_ = self.gds_validate_string(value_, node, 'BuyerFirstName') + self.BuyerFirstName = value_ + self.BuyerFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerLastName') + value_ = self.gds_validate_string(value_, node, 'BuyerLastName') + self.BuyerLastName = value_ + self.BuyerLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerAddress1') + value_ = self.gds_validate_string(value_, node, 'BuyerAddress1') + self.BuyerAddress1 = value_ + self.BuyerAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerAddress2') + value_ = self.gds_validate_string(value_, node, 'BuyerAddress2') + self.BuyerAddress2 = value_ + self.BuyerAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerAddress3') + value_ = self.gds_validate_string(value_, node, 'BuyerAddress3') + self.BuyerAddress3 = value_ + self.BuyerAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerCity') + value_ = self.gds_validate_string(value_, node, 'BuyerCity') + self.BuyerCity = value_ + self.BuyerCity_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerState') + value_ = self.gds_validate_string(value_, node, 'BuyerState') + self.BuyerState = value_ + self.BuyerState_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerPostalCode') + value_ = self.gds_validate_string(value_, node, 'BuyerPostalCode') + self.BuyerPostalCode = value_ + self.BuyerPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerCountry') + value_ = self.gds_validate_string(value_, node, 'BuyerCountry') + self.BuyerCountry = value_ + self.BuyerCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerTaxID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'BuyerTaxID') + value_ = self.gds_validate_string(value_, node, 'BuyerTaxID') + self.BuyerTaxID = value_ + self.BuyerTaxID_nsprefix_ = child_.prefix + elif nodeName_ == 'BuyerRecipient': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BuyerRecipient') + value_ = self.gds_validate_string(value_, node, 'BuyerRecipient') + self.BuyerRecipient = value_ + self.BuyerRecipient_nsprefix_ = child_.prefix + elif nodeName_ == 'TermsPayment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TermsPayment') + value_ = self.gds_validate_string(value_, node, 'TermsPayment') + self.TermsPayment = value_ + self.TermsPayment_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + 
self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'OverrideMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OverrideMID') + value_ = self.gds_validate_string(value_, node, 'OverrideMID') + self.OverrideMID = value_ + self.OverrideMID_nsprefix_ = child_.prefix + elif nodeName_ == 'ChargebackCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ChargebackCode') + value_ = self.gds_validate_string(value_, node, 'ChargebackCode') + self.ChargebackCode = value_ + self.ChargebackCode_nsprefix_ = child_.prefix +# end class eVSGXGGetLabelRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def has__content(self): + if ( + self.ImageParameter is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, 
name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Description=None, Commodity=None, Restriction=None, Quantity=None, UnitValue=None, NetPounds=None, NetOunces=None, UnitOfMeasure=None, HSTariffNumber=None, CountryofManufacture=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Commodity = Commodity + self.Commodity_nsprefix_ = None + self.Restriction = Restriction + self.Restriction_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.UnitValue = UnitValue + self.UnitValue_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.UnitOfMeasure = UnitOfMeasure + self.UnitOfMeasure_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryofManufacture = CountryofManufacture + self.CountryofManufacture_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_Commodity(self): + 
return self.Commodity + def set_Commodity(self, Commodity): + self.Commodity = Commodity + def get_Restriction(self): + return self.Restriction + def set_Restriction(self, Restriction): + self.Restriction = Restriction + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_UnitValue(self): + return self.UnitValue + def set_UnitValue(self, UnitValue): + self.UnitValue = UnitValue + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_UnitOfMeasure(self): + return self.UnitOfMeasure + def set_UnitOfMeasure(self, UnitOfMeasure): + self.UnitOfMeasure = UnitOfMeasure + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryofManufacture(self): + return self.CountryofManufacture + def set_CountryofManufacture(self, CountryofManufacture): + self.CountryofManufacture = CountryofManufacture + def has__content(self): + if ( + self.Description is not None or + self.Commodity is not None or + self.Restriction is not None or + self.Quantity is not None or + self.UnitValue is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.UnitOfMeasure is not None or + self.HSTariffNumber is not None or + self.CountryofManufacture is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Commodity is not None: + namespaceprefix_ = self.Commodity_nsprefix_ + ':' if (UseCapturedNS_ and self.Commodity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sCommodity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Commodity), input_name='Commodity')), namespaceprefix_ , eol_)) + if self.Restriction is not None: + namespaceprefix_ = self.Restriction_nsprefix_ + ':' if (UseCapturedNS_ and self.Restriction_nsprefix_) else '' + self.Restriction.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Restriction', pretty_print=pretty_print) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.UnitValue is not None: + namespaceprefix_ = self.UnitValue_nsprefix_ + ':' if (UseCapturedNS_ and self.UnitValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUnitValue>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.UnitValue, input_name='UnitValue'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.UnitOfMeasure is not None: + namespaceprefix_ = self.UnitOfMeasure_nsprefix_ + ':' if (UseCapturedNS_ and self.UnitOfMeasure_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUnitOfMeasure>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.UnitOfMeasure, input_name='UnitOfMeasure'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryofManufacture is not None: + namespaceprefix_ = self.CountryofManufacture_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryofManufacture_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryofManufacture>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryofManufacture), input_name='CountryofManufacture')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, 
gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Commodity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Commodity') + value_ = self.gds_validate_string(value_, node, 'Commodity') + self.Commodity = value_ + self.Commodity_nsprefix_ = child_.prefix + elif nodeName_ == 'Restriction': + obj_ = RestrictionType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Restriction = obj_ + obj_.original_tagname_ = 'Restriction' + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'UnitValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'UnitValue') + fval_ = self.gds_validate_decimal(fval_, node, 'UnitValue') + self.UnitValue = fval_ + self.UnitValue_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetPounds') + fval_ = self.gds_validate_decimal(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'NetOunces') + fval_ = self.gds_validate_decimal(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'UnitOfMeasure' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'UnitOfMeasure') + fval_ = self.gds_validate_decimal(fval_, node, 'UnitOfMeasure') + self.UnitOfMeasure = fval_ + self.UnitOfMeasure_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryofManufacture': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryofManufacture') + value_ = self.gds_validate_string(value_, node, 'CountryofManufacture') + self.CountryofManufacture = value_ + self.CountryofManufacture_nsprefix_ = child_.prefix +# end class ItemDetailType + + +class RestrictionType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FootnoteNumber=None, Response=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FootnoteNumber = FootnoteNumber + self.FootnoteNumber_nsprefix_ = None + self.Response = Response + self.Response_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RestrictionType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RestrictionType.subclass: + return RestrictionType.subclass(*args_, **kwargs_) + else: + return RestrictionType(*args_, **kwargs_) + factory = 
staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FootnoteNumber(self): + return self.FootnoteNumber + def set_FootnoteNumber(self, FootnoteNumber): + self.FootnoteNumber = FootnoteNumber + def get_Response(self): + return self.Response + def set_Response(self, Response): + self.Response = Response + def has__content(self): + if ( + self.FootnoteNumber is not None or + self.Response is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RestrictionType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'RestrictionType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RestrictionType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RestrictionType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RestrictionType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FootnoteNumber is not None: + namespaceprefix_ = self.FootnoteNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.FootnoteNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFootnoteNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FootnoteNumber), input_name='FootnoteNumber')), namespaceprefix_ , eol_)) + if self.Response is not None: + namespaceprefix_ = self.Response_nsprefix_ + ':' if (UseCapturedNS_ and self.Response_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sResponse>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Response), input_name='Response')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FootnoteNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FootnoteNumber') + value_ = self.gds_validate_string(value_, node, 'FootnoteNumber') + self.FootnoteNumber = value_ + 
self.FootnoteNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'Response': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Response') + value_ = self.gds_validate_string(value_, node, 'Response') + self.Response = value_ + self.Response_nsprefix_ = child_.prefix +# end class RestrictionType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelRequest' + rootClass = eVSGXGGetLabelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_gxg_get_label_request import *\n\n') + sys.stdout.write('import evs_gxg_get_label_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ImageParametersType", + "ItemDetailType", + "RestrictionType", + "ShippingContentsType", + "eVSGXGGetLabelRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_response.py new file mode 100644 index 0000000000..3608f7b7cf --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_gxg_get_label_response.py @@ -0,0 +1,1694 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:11 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_gxg_get_label_response.py') +# +# Command line arguments: +# ./schemas/eVSGXGGetLabelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_gxg_get_label_response.py" ./schemas/eVSGXGGetLabelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSGXGGetLabelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, CommodityGuarantee=None, Insurance=None, USPSBarcodeNumber=None, FedExBarcodeNumber=None, LabelImage=None, LabelImagePage2=None, LabelImagePage3=None, LabelImagePage4=None, CIImage=None, CIImagePage2=None, CIImagePage3=None, CIImagePage4=None, InsuranceFee=None, DimensionalWeight=None, LogMessage=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.CommodityGuarantee = CommodityGuarantee + self.CommodityGuarantee_nsprefix_ = None + self.Insurance = Insurance + self.Insurance_nsprefix_ = None + self.USPSBarcodeNumber = USPSBarcodeNumber + self.USPSBarcodeNumber_nsprefix_ = None + self.FedExBarcodeNumber = FedExBarcodeNumber + self.FedExBarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.LabelImagePage2 = LabelImagePage2 + self.LabelImagePage2_nsprefix_ = None + self.LabelImagePage3 = LabelImagePage3 + self.LabelImagePage3_nsprefix_ = None + self.LabelImagePage4 = LabelImagePage4 + self.LabelImagePage4_nsprefix_ = None + self.CIImage = CIImage + self.CIImage_nsprefix_ = None + self.CIImagePage2 = CIImagePage2 + self.CIImagePage2_nsprefix_ = None + self.CIImagePage3 = CIImagePage3 + self.CIImagePage3_nsprefix_ = None + self.CIImagePage4 = CIImagePage4 + self.CIImagePage4_nsprefix_ = None + self.InsuranceFee = InsuranceFee + self.InsuranceFee_nsprefix_ = None + self.DimensionalWeight = DimensionalWeight + self.DimensionalWeight_nsprefix_ = None + self.LogMessage = LogMessage + self.LogMessage_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSGXGGetLabelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSGXGGetLabelResponse.subclass: + return eVSGXGGetLabelResponse.subclass(*args_, **kwargs_) + else: + return eVSGXGGetLabelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def 
get_CommodityGuarantee(self): + return self.CommodityGuarantee + def set_CommodityGuarantee(self, CommodityGuarantee): + self.CommodityGuarantee = CommodityGuarantee + def get_Insurance(self): + return self.Insurance + def set_Insurance(self, Insurance): + self.Insurance = Insurance + def get_USPSBarcodeNumber(self): + return self.USPSBarcodeNumber + def set_USPSBarcodeNumber(self, USPSBarcodeNumber): + self.USPSBarcodeNumber = USPSBarcodeNumber + def get_FedExBarcodeNumber(self): + return self.FedExBarcodeNumber + def set_FedExBarcodeNumber(self, FedExBarcodeNumber): + self.FedExBarcodeNumber = FedExBarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_LabelImagePage2(self): + return self.LabelImagePage2 + def set_LabelImagePage2(self, LabelImagePage2): + self.LabelImagePage2 = LabelImagePage2 + def get_LabelImagePage3(self): + return self.LabelImagePage3 + def set_LabelImagePage3(self, LabelImagePage3): + self.LabelImagePage3 = LabelImagePage3 + def get_LabelImagePage4(self): + return self.LabelImagePage4 + def set_LabelImagePage4(self, LabelImagePage4): + self.LabelImagePage4 = LabelImagePage4 + def get_CIImage(self): + return self.CIImage + def set_CIImage(self, CIImage): + self.CIImage = CIImage + def get_CIImagePage2(self): + return self.CIImagePage2 + def set_CIImagePage2(self, CIImagePage2): + self.CIImagePage2 = CIImagePage2 + def get_CIImagePage3(self): + return self.CIImagePage3 + def set_CIImagePage3(self, CIImagePage3): + self.CIImagePage3 = CIImagePage3 + def get_CIImagePage4(self): + return self.CIImagePage4 + def set_CIImagePage4(self, CIImagePage4): + self.CIImagePage4 = CIImagePage4 + def get_InsuranceFee(self): + return self.InsuranceFee + def set_InsuranceFee(self, InsuranceFee): + self.InsuranceFee = InsuranceFee + def get_DimensionalWeight(self): + return self.DimensionalWeight + def set_DimensionalWeight(self, DimensionalWeight): + self.DimensionalWeight = DimensionalWeight + def get_LogMessage(self): + return self.LogMessage + def set_LogMessage(self, LogMessage): + self.LogMessage = LogMessage + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def has__content(self): + if ( + self.Postage is not None or + self.CommodityGuarantee is not None or + self.Insurance is not None or + self.USPSBarcodeNumber is not None or + self.FedExBarcodeNumber is not None or + self.LabelImage is not None or + self.LabelImagePage2 is not None or + self.LabelImagePage3 is not None or + self.LabelImagePage4 is not None or + self.CIImage is not None or + self.CIImagePage2 is not None or + self.CIImagePage3 is not None or + self.CIImagePage4 is not None or + self.InsuranceFee is not None or + self.DimensionalWeight is not None or + self.LogMessage is not None or + self.RemainingBarcodes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSGXGGetLabelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSGXGGetLabelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSGXGGetLabelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSGXGGetLabelResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSGXGGetLabelResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSGXGGetLabelResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.CommodityGuarantee is not None: + namespaceprefix_ = self.CommodityGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.CommodityGuarantee_nsprefix_) else '' + self.CommodityGuarantee.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CommodityGuarantee', pretty_print=pretty_print) + if self.Insurance is not None: + namespaceprefix_ = self.Insurance_nsprefix_ + ':' if (UseCapturedNS_ and self.Insurance_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsurance>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Insurance), input_name='Insurance')), namespaceprefix_ , eol_)) + if self.USPSBarcodeNumber is not None: + namespaceprefix_ = self.USPSBarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.USPSBarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUSPSBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.USPSBarcodeNumber), input_name='USPSBarcodeNumber')), namespaceprefix_ , eol_)) + if self.FedExBarcodeNumber is not None: + namespaceprefix_ = self.FedExBarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.FedExBarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFedExBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FedExBarcodeNumber), input_name='FedExBarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.LabelImagePage2 is not None: + namespaceprefix_ = self.LabelImagePage2_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImagePage2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImagePage2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImagePage2), input_name='LabelImagePage2')), namespaceprefix_ , eol_)) + if self.LabelImagePage3 is not None: + 
namespaceprefix_ = self.LabelImagePage3_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImagePage3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImagePage3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImagePage3), input_name='LabelImagePage3')), namespaceprefix_ , eol_)) + if self.LabelImagePage4 is not None: + namespaceprefix_ = self.LabelImagePage4_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImagePage4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImagePage4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImagePage4), input_name='LabelImagePage4')), namespaceprefix_ , eol_)) + if self.CIImage is not None: + namespaceprefix_ = self.CIImage_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImage), input_name='CIImage')), namespaceprefix_ , eol_)) + if self.CIImagePage2 is not None: + namespaceprefix_ = self.CIImagePage2_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImagePage2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImagePage2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImagePage2), input_name='CIImagePage2')), namespaceprefix_ , eol_)) + if self.CIImagePage3 is not None: + namespaceprefix_ = self.CIImagePage3_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImagePage3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImagePage3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImagePage3), input_name='CIImagePage3')), namespaceprefix_ , eol_)) + if self.CIImagePage4 is not None: + namespaceprefix_ = self.CIImagePage4_nsprefix_ + ':' if (UseCapturedNS_ and self.CIImagePage4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCIImagePage4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CIImagePage4), input_name='CIImagePage4')), namespaceprefix_ , eol_)) + if self.InsuranceFee is not None: + namespaceprefix_ = self.InsuranceFee_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceFee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuranceFee>%s%s' % (namespaceprefix_ , self.gds_format_float(self.InsuranceFee, input_name='InsuranceFee'), namespaceprefix_ , eol_)) + if self.DimensionalWeight is not None: + namespaceprefix_ = self.DimensionalWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.DimensionalWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDimensionalWeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.DimensionalWeight, input_name='DimensionalWeight'), namespaceprefix_ , eol_)) + if self.LogMessage is not None: + namespaceprefix_ = self.LogMessage_nsprefix_ + ':' if (UseCapturedNS_ and self.LogMessage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogMessage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogMessage), input_name='LogMessage')), namespaceprefix_ , eol_)) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.RemainingBarcodes, input_name='RemainingBarcodes'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'CommodityGuarantee': + obj_ = CommodityGuaranteeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.CommodityGuarantee = obj_ + obj_.original_tagname_ = 'CommodityGuarantee' + elif nodeName_ == 'Insurance': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Insurance') + value_ = self.gds_validate_string(value_, node, 'Insurance') + self.Insurance = value_ + self.Insurance_nsprefix_ = child_.prefix + elif nodeName_ == 'USPSBarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'USPSBarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'USPSBarcodeNumber') + self.USPSBarcodeNumber = value_ + self.USPSBarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'FedExBarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FedExBarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'FedExBarcodeNumber') + self.FedExBarcodeNumber = value_ + self.FedExBarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImagePage2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImagePage2') + value_ = self.gds_validate_string(value_, node, 'LabelImagePage2') + self.LabelImagePage2 = value_ + self.LabelImagePage2_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImagePage3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImagePage3') + value_ = self.gds_validate_string(value_, node, 'LabelImagePage3') + self.LabelImagePage3 = value_ + self.LabelImagePage3_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImagePage4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImagePage4') + value_ = self.gds_validate_string(value_, node, 'LabelImagePage4') + self.LabelImagePage4 = value_ + self.LabelImagePage4_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImage') + value_ = self.gds_validate_string(value_, node, 'CIImage') + self.CIImage = value_ + self.CIImage_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImagePage2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImagePage2') + value_ = 
self.gds_validate_string(value_, node, 'CIImagePage2') + self.CIImagePage2 = value_ + self.CIImagePage2_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImagePage3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImagePage3') + value_ = self.gds_validate_string(value_, node, 'CIImagePage3') + self.CIImagePage3 = value_ + self.CIImagePage3_nsprefix_ = child_.prefix + elif nodeName_ == 'CIImagePage4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CIImagePage4') + value_ = self.gds_validate_string(value_, node, 'CIImagePage4') + self.CIImagePage4 = value_ + self.CIImagePage4_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceFee' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'InsuranceFee') + fval_ = self.gds_validate_float(fval_, node, 'InsuranceFee') + self.InsuranceFee = fval_ + self.InsuranceFee_nsprefix_ = child_.prefix + elif nodeName_ == 'DimensionalWeight' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'DimensionalWeight') + fval_ = self.gds_validate_float(fval_, node, 'DimensionalWeight') + self.DimensionalWeight = fval_ + self.DimensionalWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'LogMessage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogMessage') + value_ = self.gds_validate_string(value_, node, 'LogMessage') + self.LogMessage = value_ + self.LogMessage_nsprefix_ = child_.prefix + elif nodeName_ == 'RemainingBarcodes' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'RemainingBarcodes') + ival_ = self.gds_validate_integer(ival_, node, 'RemainingBarcodes') + self.RemainingBarcodes = ival_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSGXGGetLabelResponse + + +class CommodityGuaranteeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CommodityType=None, GuaranteeDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CommodityType = CommodityType + self.CommodityType_nsprefix_ = None + self.GuaranteeDate = GuaranteeDate + self.GuaranteeDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommodityGuaranteeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CommodityGuaranteeType.subclass: + return CommodityGuaranteeType.subclass(*args_, **kwargs_) + else: + return CommodityGuaranteeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_CommodityType(self): + return self.CommodityType + def set_CommodityType(self, CommodityType): + self.CommodityType = CommodityType + def get_GuaranteeDate(self): + return self.GuaranteeDate + def set_GuaranteeDate(self, GuaranteeDate): + self.GuaranteeDate = GuaranteeDate + def has__content(self): + if ( + self.CommodityType is not None or + self.GuaranteeDate is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommodityGuaranteeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommodityGuaranteeType') + if 
imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CommodityGuaranteeType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommodityGuaranteeType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommodityGuaranteeType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommodityGuaranteeType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommodityGuaranteeType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CommodityType is not None: + namespaceprefix_ = self.CommodityType_nsprefix_ + ':' if (UseCapturedNS_ and self.CommodityType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommodityType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommodityType), input_name='CommodityType')), namespaceprefix_ , eol_)) + if self.GuaranteeDate is not None: + namespaceprefix_ = self.GuaranteeDate_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteeDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteeDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteeDate), input_name='GuaranteeDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'CommodityType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommodityType') + value_ = self.gds_validate_string(value_, node, 'CommodityType') + self.CommodityType = value_ + self.CommodityType_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteeDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteeDate') + value_ = self.gds_validate_string(value_, node, 'GuaranteeDate') + self.GuaranteeDate = value_ + self.GuaranteeDate_nsprefix_ = child_.prefix +# end class CommodityGuaranteeType + + +# +# End data representation classes. 
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelResponse' + rootClass = eVSGXGGetLabelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelResponse' + rootClass = eVSGXGGetLabelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. 
+ - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelResponse' + rootClass = eVSGXGGetLabelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSGXGGetLabelResponse' + rootClass = eVSGXGGetLabelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_gxg_get_label_response import *\n\n') + sys.stdout.write('import evs_gxg_get_label_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CommodityGuaranteeType", + "eVSGXGGetLabelResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_request.py new file mode 100644 index 0000000000..1ddc567be8 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_request.py @@ -0,0 +1,3446 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:12 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_priority_mail_intl_request.py') +# +# Command line arguments: +# ./schemas/eVSPriorityMailIntlRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_priority_mail_intl_request.py" ./schemas/eVSPriorityMailIntlRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSPriorityMailIntlRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromFirstName=None, FromMiddleInitial=None, FromLastName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromUrbanization=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, FromCustomsReference=None, ToName=None, ToFirstName=None, ToLastName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToAddress3=None, ToCity=None, ToProvince=None, ToCountry=None, ToPostalCode=None, ToPOBoxFlag=None, ToPhone=None, ToFax=None, ToEmail=None, ImportersReferenceNumber=None, NonDeliveryOption=None, RedirectName=None, RedirectEmail=None, RedirectSMS=None, RedirectAddress=None, RedirectCity=None, RedirectState=None, RedirectZipCode=None, RedirectZip4=None, Container=None, ShippingContents=None, Insured=None, InsuredNumber=None, InsuredAmount=None, GrossPounds=None, GrossOunces=None, ContentType=None, ContentTypeOther=None, Agreement=None, Comments=None, LicenseNumber=None, CertificateNumber=None, InvoiceNumber=None, ImageType=None, ImageLayout=None, CustomerRefNo=None, CustomerRefNo2=None, POZipCode=None, LabelDate=None, EMCAAccount=None, HoldForManifest=None, EELPFC=None, PriceOptions=None, Width=None, Length=None, Height=None, Girth=None, ExtraServices=None, ActionCode=None, OptOutOfSPE=None, PermitNumber=None, AccountZipCode=None, ImportersReferenceType=None, ImportersTelephoneNumber=None, ImportersFaxNumber=None, ImportersEmail=None, Machinable=None, DestinationRateIndicator=None, MID=None, LogisticsManagerMID=None, CRID=None, VendorCode=None, VendorProductVersionNumber=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, SenderEmail=None, ChargebackCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromFirstName = FromFirstName + self.FromFirstName_nsprefix_ = None + 
self.FromMiddleInitial = FromMiddleInitial + self.FromMiddleInitial_nsprefix_ = None + self.FromLastName = FromLastName + self.FromLastName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromUrbanization = FromUrbanization + self.FromUrbanization_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.FromCustomsReference = FromCustomsReference + self.FromCustomsReference_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirstName = ToFirstName + self.ToFirstName_nsprefix_ = None + self.ToLastName = ToLastName + self.ToLastName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToAddress3 = ToAddress3 + self.ToAddress3_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToProvince = ToProvince + self.ToProvince_nsprefix_ = None + self.ToCountry = ToCountry + self.ToCountry_nsprefix_ = None + self.ToPostalCode = ToPostalCode + self.ToPostalCode_nsprefix_ = None + self.ToPOBoxFlag = ToPOBoxFlag + self.ToPOBoxFlag_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.ToFax = ToFax + self.ToFax_nsprefix_ = None + self.ToEmail = ToEmail + self.ToEmail_nsprefix_ = None + self.ImportersReferenceNumber = ImportersReferenceNumber + self.ImportersReferenceNumber_nsprefix_ = None + self.NonDeliveryOption = NonDeliveryOption + self.NonDeliveryOption_nsprefix_ = None + self.RedirectName = RedirectName + self.RedirectName_nsprefix_ = None + self.RedirectEmail = RedirectEmail + self.RedirectEmail_nsprefix_ = None + self.RedirectSMS = RedirectSMS + self.RedirectSMS_nsprefix_ = None + self.RedirectAddress = RedirectAddress + self.RedirectAddress_nsprefix_ = None + self.RedirectCity = RedirectCity + self.RedirectCity_nsprefix_ = None + self.RedirectState = RedirectState + self.RedirectState_nsprefix_ = None + self.RedirectZipCode = RedirectZipCode + self.RedirectZipCode_nsprefix_ = None + self.RedirectZip4 = RedirectZip4 + self.RedirectZip4_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.Insured = Insured + self.Insured_nsprefix_ = None + self.InsuredNumber = InsuredNumber + self.InsuredNumber_nsprefix_ = None + self.InsuredAmount = InsuredAmount + self.InsuredAmount_nsprefix_ = None + self.GrossPounds = GrossPounds + self.GrossPounds_nsprefix_ = None + self.GrossOunces = GrossOunces + self.GrossOunces_nsprefix_ = None + self.ContentType = ContentType + self.ContentType_nsprefix_ = None + self.ContentTypeOther = ContentTypeOther + self.ContentTypeOther_nsprefix_ = None + self.Agreement = Agreement + self.Agreement_nsprefix_ = None + self.Comments = Comments + self.Comments_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + 
self.InvoiceNumber_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.ImageLayout = ImageLayout + self.ImageLayout_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.LabelDate = LabelDate + self.LabelDate_nsprefix_ = None + self.EMCAAccount = EMCAAccount + self.EMCAAccount_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.EELPFC = EELPFC + self.EELPFC_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.AccountZipCode = AccountZipCode + self.AccountZipCode_nsprefix_ = None + self.ImportersReferenceType = ImportersReferenceType + self.ImportersReferenceType_nsprefix_ = None + self.ImportersTelephoneNumber = ImportersTelephoneNumber + self.ImportersTelephoneNumber_nsprefix_ = None + self.ImportersFaxNumber = ImportersFaxNumber + self.ImportersFaxNumber_nsprefix_ = None + self.ImportersEmail = ImportersEmail + self.ImportersEmail_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.DestinationRateIndicator = DestinationRateIndicator + self.DestinationRateIndicator_nsprefix_ = None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.SenderEmail = SenderEmail + self.SenderEmail_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSPriorityMailIntlRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSPriorityMailIntlRequest.subclass: + return eVSPriorityMailIntlRequest.subclass(*args_, **kwargs_) + else: + return eVSPriorityMailIntlRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def 
set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromFirstName(self): + return self.FromFirstName + def set_FromFirstName(self, FromFirstName): + self.FromFirstName = FromFirstName + def get_FromMiddleInitial(self): + return self.FromMiddleInitial + def set_FromMiddleInitial(self, FromMiddleInitial): + self.FromMiddleInitial = FromMiddleInitial + def get_FromLastName(self): + return self.FromLastName + def set_FromLastName(self, FromLastName): + self.FromLastName = FromLastName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromUrbanization(self): + return self.FromUrbanization + def set_FromUrbanization(self, FromUrbanization): + self.FromUrbanization = FromUrbanization + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_FromCustomsReference(self): + return self.FromCustomsReference + def set_FromCustomsReference(self, FromCustomsReference): + self.FromCustomsReference = FromCustomsReference + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirstName(self): + return self.ToFirstName + def set_ToFirstName(self, ToFirstName): + self.ToFirstName = ToFirstName + def get_ToLastName(self): + return self.ToLastName + def set_ToLastName(self, ToLastName): + self.ToLastName = ToLastName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToAddress3(self): + return self.ToAddress3 + def set_ToAddress3(self, ToAddress3): + self.ToAddress3 = ToAddress3 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToProvince(self): + return self.ToProvince + def set_ToProvince(self, ToProvince): + self.ToProvince = ToProvince + def get_ToCountry(self): + return self.ToCountry + def set_ToCountry(self, ToCountry): + self.ToCountry = ToCountry + def get_ToPostalCode(self): + return self.ToPostalCode + def set_ToPostalCode(self, ToPostalCode): + self.ToPostalCode = ToPostalCode + def get_ToPOBoxFlag(self): + return self.ToPOBoxFlag + def 
set_ToPOBoxFlag(self, ToPOBoxFlag): + self.ToPOBoxFlag = ToPOBoxFlag + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_ToFax(self): + return self.ToFax + def set_ToFax(self, ToFax): + self.ToFax = ToFax + def get_ToEmail(self): + return self.ToEmail + def set_ToEmail(self, ToEmail): + self.ToEmail = ToEmail + def get_ImportersReferenceNumber(self): + return self.ImportersReferenceNumber + def set_ImportersReferenceNumber(self, ImportersReferenceNumber): + self.ImportersReferenceNumber = ImportersReferenceNumber + def get_NonDeliveryOption(self): + return self.NonDeliveryOption + def set_NonDeliveryOption(self, NonDeliveryOption): + self.NonDeliveryOption = NonDeliveryOption + def get_RedirectName(self): + return self.RedirectName + def set_RedirectName(self, RedirectName): + self.RedirectName = RedirectName + def get_RedirectEmail(self): + return self.RedirectEmail + def set_RedirectEmail(self, RedirectEmail): + self.RedirectEmail = RedirectEmail + def get_RedirectSMS(self): + return self.RedirectSMS + def set_RedirectSMS(self, RedirectSMS): + self.RedirectSMS = RedirectSMS + def get_RedirectAddress(self): + return self.RedirectAddress + def set_RedirectAddress(self, RedirectAddress): + self.RedirectAddress = RedirectAddress + def get_RedirectCity(self): + return self.RedirectCity + def set_RedirectCity(self, RedirectCity): + self.RedirectCity = RedirectCity + def get_RedirectState(self): + return self.RedirectState + def set_RedirectState(self, RedirectState): + self.RedirectState = RedirectState + def get_RedirectZipCode(self): + return self.RedirectZipCode + def set_RedirectZipCode(self, RedirectZipCode): + self.RedirectZipCode = RedirectZipCode + def get_RedirectZip4(self): + return self.RedirectZip4 + def set_RedirectZip4(self, RedirectZip4): + self.RedirectZip4 = RedirectZip4 + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_Insured(self): + return self.Insured + def set_Insured(self, Insured): + self.Insured = Insured + def get_InsuredNumber(self): + return self.InsuredNumber + def set_InsuredNumber(self, InsuredNumber): + self.InsuredNumber = InsuredNumber + def get_InsuredAmount(self): + return self.InsuredAmount + def set_InsuredAmount(self, InsuredAmount): + self.InsuredAmount = InsuredAmount + def get_GrossPounds(self): + return self.GrossPounds + def set_GrossPounds(self, GrossPounds): + self.GrossPounds = GrossPounds + def get_GrossOunces(self): + return self.GrossOunces + def set_GrossOunces(self, GrossOunces): + self.GrossOunces = GrossOunces + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentTypeOther(self): + return self.ContentTypeOther + def set_ContentTypeOther(self, ContentTypeOther): + self.ContentTypeOther = ContentTypeOther + def get_Agreement(self): + return self.Agreement + def set_Agreement(self, Agreement): + self.Agreement = Agreement + def get_Comments(self): + return self.Comments + def set_Comments(self, Comments): + self.Comments = Comments + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def 
set_CertificateNumber(self, CertificateNumber): + self.CertificateNumber = CertificateNumber + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_ImageLayout(self): + return self.ImageLayout + def set_ImageLayout(self, ImageLayout): + self.ImageLayout = ImageLayout + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_LabelDate(self): + return self.LabelDate + def set_LabelDate(self, LabelDate): + self.LabelDate = LabelDate + def get_EMCAAccount(self): + return self.EMCAAccount + def set_EMCAAccount(self, EMCAAccount): + self.EMCAAccount = EMCAAccount + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_EELPFC(self): + return self.EELPFC + def set_EELPFC(self, EELPFC): + self.EELPFC = EELPFC + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_AccountZipCode(self): + return self.AccountZipCode + def set_AccountZipCode(self, AccountZipCode): + self.AccountZipCode = AccountZipCode + def get_ImportersReferenceType(self): + return self.ImportersReferenceType + def set_ImportersReferenceType(self, ImportersReferenceType): + self.ImportersReferenceType = ImportersReferenceType + def get_ImportersTelephoneNumber(self): + return self.ImportersTelephoneNumber + def set_ImportersTelephoneNumber(self, ImportersTelephoneNumber): + self.ImportersTelephoneNumber = ImportersTelephoneNumber + def get_ImportersFaxNumber(self): + return self.ImportersFaxNumber + def set_ImportersFaxNumber(self, ImportersFaxNumber): + self.ImportersFaxNumber = ImportersFaxNumber + def get_ImportersEmail(self): + return self.ImportersEmail + def set_ImportersEmail(self, ImportersEmail): + self.ImportersEmail = ImportersEmail + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_DestinationRateIndicator(self): + return self.DestinationRateIndicator + def 
set_DestinationRateIndicator(self, DestinationRateIndicator): + self.DestinationRateIndicator = DestinationRateIndicator + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_SenderEmail(self): + return self.SenderEmail + def set_SenderEmail(self, SenderEmail): + self.SenderEmail = SenderEmail + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromFirstName is not None or + self.FromMiddleInitial is not None or + self.FromLastName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromUrbanization is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.FromCustomsReference is not None or + self.ToName is not None or + self.ToFirstName is not None or + self.ToLastName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToAddress3 is not None or + self.ToCity is not None or + self.ToProvince is not None or + self.ToCountry is not None or + self.ToPostalCode 
is not None or + self.ToPOBoxFlag is not None or + self.ToPhone is not None or + self.ToFax is not None or + self.ToEmail is not None or + self.ImportersReferenceNumber is not None or + self.NonDeliveryOption is not None or + self.RedirectName is not None or + self.RedirectEmail is not None or + self.RedirectSMS is not None or + self.RedirectAddress is not None or + self.RedirectCity is not None or + self.RedirectState is not None or + self.RedirectZipCode is not None or + self.RedirectZip4 is not None or + self.Container is not None or + self.ShippingContents is not None or + self.Insured is not None or + self.InsuredNumber is not None or + self.InsuredAmount is not None or + self.GrossPounds is not None or + self.GrossOunces is not None or + self.ContentType is not None or + self.ContentTypeOther is not None or + self.Agreement is not None or + self.Comments is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.InvoiceNumber is not None or + self.ImageType is not None or + self.ImageLayout is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.POZipCode is not None or + self.LabelDate is not None or + self.EMCAAccount is not None or + self.HoldForManifest is not None or + self.EELPFC is not None or + self.PriceOptions is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.ExtraServices is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.PermitNumber is not None or + self.AccountZipCode is not None or + self.ImportersReferenceType is not None or + self.ImportersTelephoneNumber is not None or + self.ImportersFaxNumber is not None or + self.ImportersEmail is not None or + self.Machinable is not None or + self.DestinationRateIndicator is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.CRID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.SenderEmail is not None or + self.ChargebackCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSPriorityMailIntlRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSPriorityMailIntlRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSPriorityMailIntlRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSPriorityMailIntlRequest', pretty_print=pretty_print) + showIndent(outfile, level, 
pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSPriorityMailIntlRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromFirstName is not None: + namespaceprefix_ = self.FromFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirstName), input_name='FromFirstName')), namespaceprefix_ , eol_)) + if self.FromMiddleInitial is not None: + namespaceprefix_ = self.FromMiddleInitial_nsprefix_ + ':' if (UseCapturedNS_ and self.FromMiddleInitial_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromMiddleInitial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromMiddleInitial), input_name='FromMiddleInitial')), namespaceprefix_ , eol_)) + if self.FromLastName is not None: + namespaceprefix_ = self.FromLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromLastName), input_name='FromLastName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and 
self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromUrbanization is not None: + namespaceprefix_ = self.FromUrbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.FromUrbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromUrbanization), input_name='FromUrbanization')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if self.FromCustomsReference is not None: + namespaceprefix_ = self.FromCustomsReference_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCustomsReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCustomsReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCustomsReference), input_name='FromCustomsReference')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), 
input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirstName is not None: + namespaceprefix_ = self.ToFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirstName), input_name='ToFirstName')), namespaceprefix_ , eol_)) + if self.ToLastName is not None: + namespaceprefix_ = self.ToLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToLastName), input_name='ToLastName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToAddress3 is not None: + namespaceprefix_ = self.ToAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress3), input_name='ToAddress3')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToProvince is not None: + namespaceprefix_ = self.ToProvince_nsprefix_ + ':' if (UseCapturedNS_ and self.ToProvince_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToProvince>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToProvince), input_name='ToProvince')), namespaceprefix_ , eol_)) + if self.ToCountry is not None: + namespaceprefix_ = self.ToCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCountry), input_name='ToCountry')), namespaceprefix_ , eol_)) + if self.ToPostalCode is not None: + namespaceprefix_ = self.ToPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPostalCode>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPostalCode), input_name='ToPostalCode')), namespaceprefix_ , eol_)) + if self.ToPOBoxFlag is not None: + namespaceprefix_ = self.ToPOBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPOBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPOBoxFlag), input_name='ToPOBoxFlag')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.ToFax is not None: + namespaceprefix_ = self.ToFax_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFax_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFax>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFax), input_name='ToFax')), namespaceprefix_ , eol_)) + if self.ToEmail is not None: + namespaceprefix_ = self.ToEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToEmail), input_name='ToEmail')), namespaceprefix_ , eol_)) + if self.ImportersReferenceNumber is not None: + namespaceprefix_ = self.ImportersReferenceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceNumber), input_name='ImportersReferenceNumber')), namespaceprefix_ , eol_)) + if self.NonDeliveryOption is not None: + namespaceprefix_ = self.NonDeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonDeliveryOption), input_name='NonDeliveryOption')), namespaceprefix_ , eol_)) + if self.RedirectName is not None: + namespaceprefix_ = self.RedirectName_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectName), input_name='RedirectName')), namespaceprefix_ , eol_)) + if self.RedirectEmail is not None: + namespaceprefix_ = self.RedirectEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectEmail), input_name='RedirectEmail')), namespaceprefix_ , eol_)) + if self.RedirectSMS is not None: + namespaceprefix_ = self.RedirectSMS_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectSMS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectSMS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectSMS), input_name='RedirectSMS')), namespaceprefix_ , eol_)) + if 
self.RedirectAddress is not None: + namespaceprefix_ = self.RedirectAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectAddress), input_name='RedirectAddress')), namespaceprefix_ , eol_)) + if self.RedirectCity is not None: + namespaceprefix_ = self.RedirectCity_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectCity), input_name='RedirectCity')), namespaceprefix_ , eol_)) + if self.RedirectState is not None: + namespaceprefix_ = self.RedirectState_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectState), input_name='RedirectState')), namespaceprefix_ , eol_)) + if self.RedirectZipCode is not None: + namespaceprefix_ = self.RedirectZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZipCode), input_name='RedirectZipCode')), namespaceprefix_ , eol_)) + if self.RedirectZip4 is not None: + namespaceprefix_ = self.RedirectZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.RedirectZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRedirectZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RedirectZip4), input_name='RedirectZip4')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.Insured is not None: + namespaceprefix_ = self.Insured_nsprefix_ + ':' if (UseCapturedNS_ and self.Insured_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsured>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Insured, input_name='Insured'), namespaceprefix_ , eol_)) + if self.InsuredNumber is not None: + namespaceprefix_ = self.InsuredNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredNumber), input_name='InsuredNumber')), namespaceprefix_ , eol_)) + if self.InsuredAmount is not None: + namespaceprefix_ = self.InsuredAmount_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredAmount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredAmount>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuredAmount), input_name='InsuredAmount')), namespaceprefix_ , eol_)) + if self.GrossPounds is not None: + namespaceprefix_ = self.GrossPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.GrossPounds, input_name='GrossPounds'), namespaceprefix_ , eol_)) + if self.GrossOunces is not None: + namespaceprefix_ = self.GrossOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.GrossOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGrossOunces>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.GrossOunces, input_name='GrossOunces'), namespaceprefix_ , eol_)) + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentTypeOther is not None: + namespaceprefix_ = self.ContentTypeOther_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentTypeOther_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentTypeOther>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentTypeOther), input_name='ContentTypeOther')), namespaceprefix_ , eol_)) + if self.Agreement is not None: + namespaceprefix_ = self.Agreement_nsprefix_ + ':' if (UseCapturedNS_ and self.Agreement_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAgreement>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Agreement), input_name='Agreement')), namespaceprefix_ , eol_)) + if self.Comments is not None: + namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + 
namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.ImageLayout is not None: + namespaceprefix_ = self.ImageLayout_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageLayout_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageLayout>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageLayout), input_name='ImageLayout')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.LabelDate is not None: + namespaceprefix_ = self.LabelDate_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelDate), input_name='LabelDate')), namespaceprefix_ , eol_)) + if self.EMCAAccount is not None: + namespaceprefix_ = self.EMCAAccount_nsprefix_ + ':' if (UseCapturedNS_ and self.EMCAAccount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEMCAAccount>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EMCAAccount), input_name='EMCAAccount')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.EELPFC is not None: + namespaceprefix_ = self.EELPFC_nsprefix_ + ':' if (UseCapturedNS_ and self.EELPFC_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEELPFC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EELPFC), input_name='EELPFC')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Width), input_name='Width')), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Length), input_name='Length')), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Height), input_name='Height')), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Girth), input_name='Girth')), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OptOutOfSPE, input_name='OptOutOfSPE'), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.AccountZipCode is not None: + namespaceprefix_ = self.AccountZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAccountZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountZipCode), input_name='AccountZipCode')), namespaceprefix_ , eol_)) + if self.ImportersReferenceType is not None: + namespaceprefix_ = self.ImportersReferenceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReferenceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReferenceType>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReferenceType), input_name='ImportersReferenceType')), namespaceprefix_ , eol_)) + if self.ImportersTelephoneNumber is not None: + namespaceprefix_ = self.ImportersTelephoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersTelephoneNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersTelephoneNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersTelephoneNumber), input_name='ImportersTelephoneNumber')), namespaceprefix_ , eol_)) + if self.ImportersFaxNumber is not None: + namespaceprefix_ = self.ImportersFaxNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersFaxNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersFaxNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersFaxNumber), input_name='ImportersFaxNumber')), namespaceprefix_ , eol_)) + if self.ImportersEmail is not None: + namespaceprefix_ = self.ImportersEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersEmail), input_name='ImportersEmail')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.DestinationRateIndicator is not None: + namespaceprefix_ = self.DestinationRateIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationRateIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationRateIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationRateIndicator), input_name='DestinationRateIndicator')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if self.SenderFirstName is not None: + namespaceprefix_ = self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.SenderEmail is not None: + namespaceprefix_ = self.SenderEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEmail), input_name='SenderEmail')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirstName') + value_ = self.gds_validate_string(value_, node, 'FromFirstName') + self.FromFirstName = value_ + self.FromFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromMiddleInitial': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromMiddleInitial') + value_ = self.gds_validate_string(value_, node, 'FromMiddleInitial') + self.FromMiddleInitial = value_ + self.FromMiddleInitial_nsprefix_ = child_.prefix + elif nodeName_ == 'FromLastName': + value_ = child_.text + 
value_ = self.gds_parse_string(value_, node, 'FromLastName') + value_ = self.gds_validate_string(value_, node, 'FromLastName') + self.FromLastName = value_ + self.FromLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromUrbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromUrbanization') + value_ = self.gds_validate_string(value_, node, 'FromUrbanization') + self.FromUrbanization = value_ + self.FromUrbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCustomsReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCustomsReference') + value_ = self.gds_validate_string(value_, node, 'FromCustomsReference') + self.FromCustomsReference = value_ + self.FromCustomsReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirstName') + value_ = self.gds_validate_string(value_, node, 'ToFirstName') + self.ToFirstName = value_ + self.ToFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToLastName') + value_ = self.gds_validate_string(value_, node, 'ToLastName') + self.ToLastName = value_ + self.ToLastName_nsprefix_ = child_.prefix + elif nodeName_ == 
'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress3') + value_ = self.gds_validate_string(value_, node, 'ToAddress3') + self.ToAddress3 = value_ + self.ToAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToProvince': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToProvince') + value_ = self.gds_validate_string(value_, node, 'ToProvince') + self.ToProvince = value_ + self.ToProvince_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCountry') + value_ = self.gds_validate_string(value_, node, 'ToCountry') + self.ToCountry = value_ + self.ToCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPostalCode') + value_ = self.gds_validate_string(value_, node, 'ToPostalCode') + self.ToPostalCode = value_ + self.ToPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPOBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPOBoxFlag') + value_ = self.gds_validate_string(value_, node, 'ToPOBoxFlag') + self.ToPOBoxFlag = value_ + self.ToPOBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFax': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFax') + value_ = self.gds_validate_string(value_, node, 'ToFax') + self.ToFax = value_ + self.ToFax_nsprefix_ = child_.prefix + elif nodeName_ == 'ToEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToEmail') + value_ = self.gds_validate_string(value_, node, 'ToEmail') + self.ToEmail = value_ + self.ToEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReferenceNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceNumber') + self.ImportersReferenceNumber = value_ + self.ImportersReferenceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonDeliveryOption') + value_ = self.gds_validate_string(value_, node, 'NonDeliveryOption') + self.NonDeliveryOption = value_ + self.NonDeliveryOption_nsprefix_ = child_.prefix + elif 
nodeName_ == 'RedirectName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectName') + value_ = self.gds_validate_string(value_, node, 'RedirectName') + self.RedirectName = value_ + self.RedirectName_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectEmail') + value_ = self.gds_validate_string(value_, node, 'RedirectEmail') + self.RedirectEmail = value_ + self.RedirectEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectSMS': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectSMS') + value_ = self.gds_validate_string(value_, node, 'RedirectSMS') + self.RedirectSMS = value_ + self.RedirectSMS_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectAddress') + value_ = self.gds_validate_string(value_, node, 'RedirectAddress') + self.RedirectAddress = value_ + self.RedirectAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectCity') + value_ = self.gds_validate_string(value_, node, 'RedirectCity') + self.RedirectCity = value_ + self.RedirectCity_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectState') + value_ = self.gds_validate_string(value_, node, 'RedirectState') + self.RedirectState = value_ + self.RedirectState_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZipCode') + value_ = self.gds_validate_string(value_, node, 'RedirectZipCode') + self.RedirectZipCode = value_ + self.RedirectZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'RedirectZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RedirectZip4') + value_ = self.gds_validate_string(value_, node, 'RedirectZip4') + self.RedirectZip4 = value_ + self.RedirectZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'Insured': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Insured') + ival_ = self.gds_validate_boolean(ival_, node, 'Insured') + self.Insured = ival_ + self.Insured_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredNumber') + value_ = self.gds_validate_string(value_, node, 'InsuredNumber') + self.InsuredNumber = value_ + self.InsuredNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredAmount': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuredAmount') + value_ = self.gds_validate_string(value_, node, 'InsuredAmount') + self.InsuredAmount = value_ + self.InsuredAmount_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossPounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'GrossPounds') + ival_ = self.gds_validate_integer(ival_, 
node, 'GrossPounds') + self.GrossPounds = ival_ + self.GrossPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'GrossOunces' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'GrossOunces') + ival_ = self.gds_validate_integer(ival_, node, 'GrossOunces') + self.GrossOunces = ival_ + self.GrossOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentTypeOther': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentTypeOther') + value_ = self.gds_validate_string(value_, node, 'ContentTypeOther') + self.ContentTypeOther = value_ + self.ContentTypeOther_nsprefix_ = child_.prefix + elif nodeName_ == 'Agreement': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Agreement') + value_ = self.gds_validate_string(value_, node, 'Agreement') + self.Agreement = value_ + self.Agreement_nsprefix_ = child_.prefix + elif nodeName_ == 'Comments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Comments') + value_ = self.gds_validate_string(value_, node, 'Comments') + self.Comments = value_ + self.Comments_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageLayout': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageLayout') + value_ = self.gds_validate_string(value_, node, 'ImageLayout') + self.ImageLayout = value_ + self.ImageLayout_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelDate': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelDate') + value_ = self.gds_validate_string(value_, node, 'LabelDate') + self.LabelDate = value_ + self.LabelDate_nsprefix_ = child_.prefix + elif nodeName_ == 'EMCAAccount': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EMCAAccount') + value_ = self.gds_validate_string(value_, node, 'EMCAAccount') + self.EMCAAccount = value_ + self.EMCAAccount_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'EELPFC': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EELPFC') + value_ = self.gds_validate_string(value_, node, 'EELPFC') + self.EELPFC = value_ + self.EELPFC_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'Width': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Width') + value_ = self.gds_validate_string(value_, node, 'Width') + self.Width = value_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Length') + value_ = self.gds_validate_string(value_, node, 'Length') + self.Length = value_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Height') + value_ = self.gds_validate_string(value_, node, 'Height') + self.Height = value_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Girth') + value_ = self.gds_validate_string(value_, node, 'Girth') + self.Girth = value_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OptOutOfSPE') + ival_ = self.gds_validate_boolean(ival_, node, 'OptOutOfSPE') + self.OptOutOfSPE = ival_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'AccountZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AccountZipCode') + value_ = self.gds_validate_string(value_, node, 'AccountZipCode') + self.AccountZipCode = value_ + self.AccountZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReferenceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'ImportersReferenceType') + value_ = self.gds_validate_string(value_, node, 'ImportersReferenceType') + self.ImportersReferenceType = value_ + self.ImportersReferenceType_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersTelephoneNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersTelephoneNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersTelephoneNumber') + self.ImportersTelephoneNumber = value_ + self.ImportersTelephoneNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersFaxNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersFaxNumber') + value_ = self.gds_validate_string(value_, node, 'ImportersFaxNumber') + self.ImportersFaxNumber = value_ + self.ImportersFaxNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersEmail') + value_ = self.gds_validate_string(value_, node, 'ImportersEmail') + self.ImportersEmail = value_ + self.ImportersEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationRateIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationRateIndicator') + value_ = self.gds_validate_string(value_, node, 'DestinationRateIndicator') + self.DestinationRateIndicator = value_ + self.DestinationRateIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + 
self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEmail') + value_ = self.gds_validate_string(value_, node, 'SenderEmail') + self.SenderEmail = value_ + self.SenderEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'ChargebackCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ChargebackCode') + value_ = self.gds_validate_string(value_, node, 'ChargebackCode') + self.ChargebackCode = value_ + self.ChargebackCode_nsprefix_ = child_.prefix +# end class eVSPriorityMailIntlRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def 
set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def has__content(self): + if ( + self.ImageParameter is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix +# end class ImageParametersType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = 
getSubclassFromModule_( + CurrentSubclassModule_, ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + 
subclass = None + superclass = None + def __init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetPounds, input_name='NetPounds'), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Value') + fval_ = self.gds_validate_float(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetPounds') + fval_ = self.gds_validate_float(fval_, node, 'NetPounds') + self.NetPounds = fval_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetOunces') + fval_ = self.gds_validate_float(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % (namespaceprefix_ , self.gds_format_integer(ExtraService_, input_name='ExtraService'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ExtraService') + ival_ = self.gds_validate_integer(ival_, node, 'ExtraService') + self.ExtraService.append(ival_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlRequest' + rootClass = eVSPriorityMailIntlRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_priority_mail_intl_request import *\n\n') + sys.stdout.write('import evs_priority_mail_intl_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServicesType", + "ImageParametersType", + "ItemDetailType", + "ShippingContentsType", + "eVSPriorityMailIntlRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_response.py new file mode 100644 index 0000000000..3322f63554 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_priority_mail_intl_response.py @@ -0,0 +1,1831 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:12 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_priority_mail_intl_response.py') +# +# Command line arguments: +# ./schemas/eVSPriorityMailIntlResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_priority_mail_intl_response.py" ./schemas/eVSPriorityMailIntlResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSPriorityMailIntlResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Postage=None, TotalValue=None, SDRValue=None, BarcodeNumber=None, LabelImage=None, Page2Image=None, Page3Image=None, Page4Image=None, Page5Image=None, Page6Image=None, Prohibitions=None, Restrictions=None, Observations=None, Regulations=None, AdditionalRestrictions=None, ParcelIndemnityCoverage=None, ExtraServices=None, RemainingBarcodes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.TotalValue = TotalValue + self.TotalValue_nsprefix_ = None + self.SDRValue = SDRValue + self.SDRValue_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.Page2Image = Page2Image + self.Page2Image_nsprefix_ = None + self.Page3Image = Page3Image + self.Page3Image_nsprefix_ = None + self.Page4Image = Page4Image + self.Page4Image_nsprefix_ = None + self.Page5Image = Page5Image + self.Page5Image_nsprefix_ = None + self.Page6Image = Page6Image + self.Page6Image_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.Regulations = Regulations + self.Regulations_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + self.ParcelIndemnityCoverage_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.RemainingBarcodes = RemainingBarcodes + self.RemainingBarcodes_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSPriorityMailIntlResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSPriorityMailIntlResponse.subclass: + return eVSPriorityMailIntlResponse.subclass(*args_, **kwargs_) + else: + return eVSPriorityMailIntlResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Postage(self): + return self.Postage + def 
set_Postage(self, Postage): + self.Postage = Postage + def get_TotalValue(self): + return self.TotalValue + def set_TotalValue(self, TotalValue): + self.TotalValue = TotalValue + def get_SDRValue(self): + return self.SDRValue + def set_SDRValue(self, SDRValue): + self.SDRValue = SDRValue + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_Page2Image(self): + return self.Page2Image + def set_Page2Image(self, Page2Image): + self.Page2Image = Page2Image + def get_Page3Image(self): + return self.Page3Image + def set_Page3Image(self, Page3Image): + self.Page3Image = Page3Image + def get_Page4Image(self): + return self.Page4Image + def set_Page4Image(self, Page4Image): + self.Page4Image = Page4Image + def get_Page5Image(self): + return self.Page5Image + def set_Page5Image(self, Page5Image): + self.Page5Image = Page5Image + def get_Page6Image(self): + return self.Page6Image + def set_Page6Image(self, Page6Image): + self.Page6Image = Page6Image + def get_Prohibitions(self): + return self.Prohibitions + def set_Prohibitions(self, Prohibitions): + self.Prohibitions = Prohibitions + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def get_Observations(self): + return self.Observations + def set_Observations(self, Observations): + self.Observations = Observations + def get_Regulations(self): + return self.Regulations + def set_Regulations(self, Regulations): + self.Regulations = Regulations + def get_AdditionalRestrictions(self): + return self.AdditionalRestrictions + def set_AdditionalRestrictions(self, AdditionalRestrictions): + self.AdditionalRestrictions = AdditionalRestrictions + def get_ParcelIndemnityCoverage(self): + return self.ParcelIndemnityCoverage + def set_ParcelIndemnityCoverage(self, ParcelIndemnityCoverage): + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_RemainingBarcodes(self): + return self.RemainingBarcodes + def set_RemainingBarcodes(self, RemainingBarcodes): + self.RemainingBarcodes = RemainingBarcodes + def has__content(self): + if ( + self.Postage is not None or + self.TotalValue is not None or + self.SDRValue is not None or + self.BarcodeNumber is not None or + self.LabelImage is not None or + self.Page2Image is not None or + self.Page3Image is not None or + self.Page4Image is not None or + self.Page5Image is not None or + self.Page6Image is not None or + self.Prohibitions is not None or + self.Restrictions is not None or + self.Observations is not None or + self.Regulations is not None or + self.AdditionalRestrictions is not None or + self.ParcelIndemnityCoverage is not None or + self.ExtraServices is not None or + self.RemainingBarcodes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSPriorityMailIntlResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSPriorityMailIntlResponse': + name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSPriorityMailIntlResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSPriorityMailIntlResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSPriorityMailIntlResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSPriorityMailIntlResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.TotalValue is not None: + namespaceprefix_ = self.TotalValue_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotalValue>%s%s' % (namespaceprefix_ , self.gds_format_float(self.TotalValue, input_name='TotalValue'), namespaceprefix_ , eol_)) + if self.SDRValue is not None: + namespaceprefix_ = self.SDRValue_nsprefix_ + ':' if (UseCapturedNS_ and self.SDRValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSDRValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SDRValue), input_name='SDRValue')), namespaceprefix_ , eol_)) + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.Page2Image is not None: + namespaceprefix_ = self.Page2Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page2Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage2Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page2Image), input_name='Page2Image')), namespaceprefix_ , eol_)) + if self.Page3Image is not None: + namespaceprefix_ = self.Page3Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page3Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage3Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page3Image), input_name='Page3Image')), 
namespaceprefix_ , eol_)) + if self.Page4Image is not None: + namespaceprefix_ = self.Page4Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page4Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage4Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page4Image), input_name='Page4Image')), namespaceprefix_ , eol_)) + if self.Page5Image is not None: + namespaceprefix_ = self.Page5Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page5Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage5Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page5Image), input_name='Page5Image')), namespaceprefix_ , eol_)) + if self.Page6Image is not None: + namespaceprefix_ = self.Page6Image_nsprefix_ + ':' if (UseCapturedNS_ and self.Page6Image_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPage6Image>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Page6Image), input_name='Page6Image')), namespaceprefix_ , eol_)) + if self.Prohibitions is not None: + namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProhibitions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_)) + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + if self.Observations is not None: + namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sObservations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_)) + if self.Regulations is not None: + namespaceprefix_ = self.Regulations_nsprefix_ + ':' if (UseCapturedNS_ and self.Regulations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRegulations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Regulations), input_name='Regulations')), namespaceprefix_ , eol_)) + if self.AdditionalRestrictions is not None: + namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_)) + if self.ParcelIndemnityCoverage is not None: + namespaceprefix_ = self.ParcelIndemnityCoverage_nsprefix_ + ':' if (UseCapturedNS_ and self.ParcelIndemnityCoverage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sParcelIndemnityCoverage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ParcelIndemnityCoverage, input_name='ParcelIndemnityCoverage'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = 
self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.RemainingBarcodes is not None: + namespaceprefix_ = self.RemainingBarcodes_nsprefix_ + ':' if (UseCapturedNS_ and self.RemainingBarcodes_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemainingBarcodes>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.RemainingBarcodes, input_name='RemainingBarcodes'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'TotalValue' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'TotalValue') + fval_ = self.gds_validate_float(fval_, node, 'TotalValue') + self.TotalValue = fval_ + self.TotalValue_nsprefix_ = child_.prefix + elif nodeName_ == 'SDRValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SDRValue') + value_ = self.gds_validate_string(value_, node, 'SDRValue') + self.SDRValue = value_ + self.SDRValue_nsprefix_ = child_.prefix + elif nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'Page2Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page2Image') + value_ = self.gds_validate_string(value_, node, 'Page2Image') + self.Page2Image = value_ + self.Page2Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page3Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page3Image') + value_ = self.gds_validate_string(value_, node, 'Page3Image') + self.Page3Image = value_ + self.Page3Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page4Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page4Image') + value_ = self.gds_validate_string(value_, node, 'Page4Image') + self.Page4Image = value_ + self.Page4Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page5Image': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Page5Image') + value_ = self.gds_validate_string(value_, node, 'Page5Image') + self.Page5Image = value_ + self.Page5Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Page6Image': + value_ = child_.text + 
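+                # Like the other label-image fields above, Page6Image is kept as the raw
+                # string from the response (typically base64-encoded); this generated
+                # class does not attempt to decode it.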
value_ = self.gds_parse_string(value_, node, 'Page6Image') + value_ = self.gds_validate_string(value_, node, 'Page6Image') + self.Page6Image = value_ + self.Page6Image_nsprefix_ = child_.prefix + elif nodeName_ == 'Prohibitions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Prohibitions') + value_ = self.gds_validate_string(value_, node, 'Prohibitions') + self.Prohibitions = value_ + self.Prohibitions_nsprefix_ = child_.prefix + elif nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'Regulations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Regulations') + value_ = self.gds_validate_string(value_, node, 'Regulations') + self.Regulations = value_ + self.Regulations_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'ParcelIndemnityCoverage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'ParcelIndemnityCoverage') + fval_ = self.gds_validate_float(fval_, node, 'ParcelIndemnityCoverage') + self.ParcelIndemnityCoverage = fval_ + self.ParcelIndemnityCoverage_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'RemainingBarcodes' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'RemainingBarcodes') + ival_ = self.gds_validate_integer(ival_, node, 'RemainingBarcodes') + self.RemainingBarcodes = ival_ + self.RemainingBarcodes_nsprefix_ = child_.prefix +# end class eVSPriorityMailIntlResponse + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def 
set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + obj_ = ExtraServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraService.append(obj_) + obj_.original_tagname_ = 'ExtraService' +# end class ExtraServicesType + + +class ExtraServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Price=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Price = Price + 
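+        # The *_nsprefix_ slots record the namespace prefix captured while parsing so
+        # that export() can re-emit it when UseCapturedNS_ is enabled.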
self.Price_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServiceType.subclass: + return ExtraServiceType.subclass(*args_, **kwargs_) + else: + return ExtraServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Price is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + 
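+        # Populate this instance from an lxml element node: attributes are read first,
+        # then each child element is dispatched to _buildChildren below.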
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ServiceID') + ival_ = self.gds_validate_integer(ival_, node, 'ServiceID') + self.ServiceID = ival_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
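+    The string form is what parse() passes as namespacedef_ when re-exporting
+    the parsed document.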
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
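+
+    When print_warnings is True, any messages collected by GdsCollector_ while
+    parsing are also written to sys.stderr.
+
+    Minimal sketch (hypothetical payload; element and accessor names as
+    generated above):
+        rootObj = parseString(
+            '<eVSPriorityMailIntlResponse>...</eVSPriorityMailIntlResponse>',
+            silence=True)
+        print(rootObj.get_BarcodeNumber())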
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSPriorityMailIntlResponse' + rootClass = eVSPriorityMailIntlResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_priority_mail_intl_response import *\n\n') + sys.stdout.write('import evs_priority_mail_intl_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ExtraServiceType", + "ExtraServicesType", + "eVSPriorityMailIntlResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_request.py new file mode 100644 index 0000000000..5afd524d77 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_request.py @@ -0,0 +1,4034 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:10 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_request.py') +# +# Command line arguments: +# ./schemas/eVSRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_request.py" ./schemas/eVSRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
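+            # Recurse toward the document root; get_path_() reverses the collected
+            # tags and joins them with '/' to form the element path.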
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
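+# A minimal usage sketch for this generated module (illustrative only: it assumes
+# the parseString()/export() helpers that generateDS emits further down in this
+# file, and the eVSRequest root element from the eVSRequest.xsd named above):
+#
+#     import sys
+#     import karrio.schemas.usps_international.evs_request as evs
+#     req = evs.parseString('<eVSRequest>...</eVSRequest>', silence=True)
+#     req.export(sys.stdout, 0, name_='eVSRequest')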
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, Revision=None, ImageParameters=None, FromName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, FromPhone=None, POZipCode=None, AllowNonCleansedOriginAddr=None, ToName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToCity=None, ToState=None, ToZip5=None, ToZip4=None, ToPhone=None, POBox=None, ToContactPreference=None, ToContactMessaging=None, ToContactEmail=None, AllowNonCleansedDestAddr=None, WeightInOunces=None, ServiceType=None, Container=None, Width=None, Length=None, Height=None, Girth=None, Machinable=None, ProcessingCategory=None, PriceOptions=None, InsuredAmount=None, AddressServiceRequested=None, ExpressMailOptions=None, ShipDate=None, CustomerRefNo=None, CustomerRefNo2=None, ExtraServices=None, HoldForPickup=None, OpenDistribute=None, PermitNumber=None, PermitZIPCode=None, PermitHolderName=None, CRID=None, MID=None, LogisticsManagerMID=None, VendorCode=None, VendorProductVersionNumber=None, SenderName=None, SenderEMail=None, RecipientName=None, RecipientEMail=None, ReceiptOption=None, ImageType=None, HoldForManifest=None, NineDigitRoutingZip=None, ShipInfo=None, CarrierRelease=None, DropOffTime=None, ReturnCommitments=None, PrintCustomerRefNo=None, Content=None, ActionCode=None, OptOutOfSPE=None, SortationLevel=None, DestinationEntryFacilityType=None, ShippingContents=None, CustomsContentType=None, ContentComments=None, RestrictionType=None, RestrictionComments=None, AESITN=None, ImportersReference=None, ImportersContact=None, ExportersReference=None, ExportersContact=None, InvoiceNumber=None, LicenseNumber=None, CertificateNumber=None, NonDeliveryOption=None, AltReturnAddress1=None, AltReturnAddress2=None, AltReturnAddress3=None, AltReturnAddress4=None, AltReturnAddress5=None, AltReturnAddress6=None, AltReturnCountry=None, LabelImportType=None, ePostageMailerReporting=None, SenderFirstName=None, SenderLastName=None, SenderBusinessName=None, SenderAddress1=None, SenderCity=None, SenderState=None, SenderZip5=None, SenderPhone=None, ChargebackCode=None, TrackingRetentionPeriod=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + 
self.Option = Option + self.Option_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ImageParameters = ImageParameters + self.ImageParameters_nsprefix_ = None + self.FromName = FromName + self.FromName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.FromPhone = FromPhone + self.FromPhone_nsprefix_ = None + self.POZipCode = POZipCode + self.POZipCode_nsprefix_ = None + self.AllowNonCleansedOriginAddr = AllowNonCleansedOriginAddr + self.AllowNonCleansedOriginAddr_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToState = ToState + self.ToState_nsprefix_ = None + self.ToZip5 = ToZip5 + self.ToZip5_nsprefix_ = None + self.ToZip4 = ToZip4 + self.ToZip4_nsprefix_ = None + self.ToPhone = ToPhone + self.ToPhone_nsprefix_ = None + self.POBox = POBox + self.POBox_nsprefix_ = None + self.ToContactPreference = ToContactPreference + self.ToContactPreference_nsprefix_ = None + self.ToContactMessaging = ToContactMessaging + self.ToContactMessaging_nsprefix_ = None + self.ToContactEmail = ToContactEmail + self.ToContactEmail_nsprefix_ = None + self.AllowNonCleansedDestAddr = AllowNonCleansedDestAddr + self.AllowNonCleansedDestAddr_nsprefix_ = None + self.WeightInOunces = WeightInOunces + self.WeightInOunces_nsprefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.ProcessingCategory = ProcessingCategory + self.ProcessingCategory_nsprefix_ = None + self.PriceOptions = PriceOptions + self.PriceOptions_nsprefix_ = None + self.InsuredAmount = InsuredAmount + self.InsuredAmount_nsprefix_ = None + self.AddressServiceRequested = AddressServiceRequested + self.AddressServiceRequested_nsprefix_ = None + self.ExpressMailOptions = ExpressMailOptions + self.ExpressMailOptions_nsprefix_ = None + self.ShipDate = ShipDate + self.validate_ShipDateType(self.ShipDate) + self.ShipDate_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + self.CustomerRefNo2 = CustomerRefNo2 + self.CustomerRefNo2_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.HoldForPickup = HoldForPickup + self.HoldForPickup_nsprefix_ = None + self.OpenDistribute = OpenDistribute + self.OpenDistribute_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.PermitZIPCode = PermitZIPCode + self.PermitZIPCode_nsprefix_ = None + self.PermitHolderName = PermitHolderName + self.PermitHolderName_nsprefix_ = None + self.CRID = CRID + self.CRID_nsprefix_ = 
None + self.MID = MID + self.MID_nsprefix_ = None + self.LogisticsManagerMID = LogisticsManagerMID + self.LogisticsManagerMID_nsprefix_ = None + self.VendorCode = VendorCode + self.VendorCode_nsprefix_ = None + self.VendorProductVersionNumber = VendorProductVersionNumber + self.VendorProductVersionNumber_nsprefix_ = None + self.SenderName = SenderName + self.SenderName_nsprefix_ = None + self.SenderEMail = SenderEMail + self.SenderEMail_nsprefix_ = None + self.RecipientName = RecipientName + self.RecipientName_nsprefix_ = None + self.RecipientEMail = RecipientEMail + self.RecipientEMail_nsprefix_ = None + self.ReceiptOption = ReceiptOption + self.ReceiptOption_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.HoldForManifest = HoldForManifest + self.HoldForManifest_nsprefix_ = None + self.NineDigitRoutingZip = NineDigitRoutingZip + self.NineDigitRoutingZip_nsprefix_ = None + self.ShipInfo = ShipInfo + self.ShipInfo_nsprefix_ = None + self.CarrierRelease = CarrierRelease + self.CarrierRelease_nsprefix_ = None + self.DropOffTime = DropOffTime + self.DropOffTime_nsprefix_ = None + self.ReturnCommitments = ReturnCommitments + self.ReturnCommitments_nsprefix_ = None + self.PrintCustomerRefNo = PrintCustomerRefNo + self.PrintCustomerRefNo_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.OptOutOfSPE = OptOutOfSPE + self.OptOutOfSPE_nsprefix_ = None + self.SortationLevel = SortationLevel + self.SortationLevel_nsprefix_ = None + self.DestinationEntryFacilityType = DestinationEntryFacilityType + self.DestinationEntryFacilityType_nsprefix_ = None + self.ShippingContents = ShippingContents + self.ShippingContents_nsprefix_ = None + self.CustomsContentType = CustomsContentType + self.CustomsContentType_nsprefix_ = None + self.ContentComments = ContentComments + self.ContentComments_nsprefix_ = None + self.RestrictionType = RestrictionType + self.RestrictionType_nsprefix_ = None + self.RestrictionComments = RestrictionComments + self.RestrictionComments_nsprefix_ = None + self.AESITN = AESITN + self.AESITN_nsprefix_ = None + self.ImportersReference = ImportersReference + self.ImportersReference_nsprefix_ = None + self.ImportersContact = ImportersContact + self.ImportersContact_nsprefix_ = None + self.ExportersReference = ExportersReference + self.ExportersReference_nsprefix_ = None + self.ExportersContact = ExportersContact + self.ExportersContact_nsprefix_ = None + self.InvoiceNumber = InvoiceNumber + self.InvoiceNumber_nsprefix_ = None + self.LicenseNumber = LicenseNumber + self.LicenseNumber_nsprefix_ = None + self.CertificateNumber = CertificateNumber + self.CertificateNumber_nsprefix_ = None + self.NonDeliveryOption = NonDeliveryOption + self.NonDeliveryOption_nsprefix_ = None + self.AltReturnAddress1 = AltReturnAddress1 + self.AltReturnAddress1_nsprefix_ = None + self.AltReturnAddress2 = AltReturnAddress2 + self.AltReturnAddress2_nsprefix_ = None + self.AltReturnAddress3 = AltReturnAddress3 + self.AltReturnAddress3_nsprefix_ = None + self.AltReturnAddress4 = AltReturnAddress4 + self.AltReturnAddress4_nsprefix_ = None + self.AltReturnAddress5 = AltReturnAddress5 + self.AltReturnAddress5_nsprefix_ = None + self.AltReturnAddress6 = AltReturnAddress6 + self.AltReturnAddress6_nsprefix_ = None + self.AltReturnCountry = AltReturnCountry + self.AltReturnCountry_nsprefix_ = None + self.LabelImportType = LabelImportType + self.LabelImportType_nsprefix_ = None + 
self.ePostageMailerReporting = ePostageMailerReporting + self.ePostageMailerReporting_nsprefix_ = None + self.SenderFirstName = SenderFirstName + self.SenderFirstName_nsprefix_ = None + self.SenderLastName = SenderLastName + self.SenderLastName_nsprefix_ = None + self.SenderBusinessName = SenderBusinessName + self.SenderBusinessName_nsprefix_ = None + self.SenderAddress1 = SenderAddress1 + self.SenderAddress1_nsprefix_ = None + self.SenderCity = SenderCity + self.SenderCity_nsprefix_ = None + self.SenderState = SenderState + self.SenderState_nsprefix_ = None + self.SenderZip5 = SenderZip5 + self.SenderZip5_nsprefix_ = None + self.SenderPhone = SenderPhone + self.SenderPhone_nsprefix_ = None + self.ChargebackCode = ChargebackCode + self.ChargebackCode_nsprefix_ = None + self.TrackingRetentionPeriod = TrackingRetentionPeriod + self.TrackingRetentionPeriod_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSRequest.subclass: + return eVSRequest.subclass(*args_, **kwargs_) + else: + return eVSRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ImageParameters(self): + return self.ImageParameters + def set_ImageParameters(self, ImageParameters): + self.ImageParameters = ImageParameters + def get_FromName(self): + return self.FromName + def set_FromName(self, FromName): + self.FromName = FromName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): + self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_FromPhone(self): + return self.FromPhone + def set_FromPhone(self, FromPhone): + self.FromPhone = FromPhone + def get_POZipCode(self): + return self.POZipCode + def set_POZipCode(self, POZipCode): + self.POZipCode = POZipCode + def get_AllowNonCleansedOriginAddr(self): + return self.AllowNonCleansedOriginAddr + def set_AllowNonCleansedOriginAddr(self, AllowNonCleansedOriginAddr): + self.AllowNonCleansedOriginAddr = AllowNonCleansedOriginAddr + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, 
ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToState(self): + return self.ToState + def set_ToState(self, ToState): + self.ToState = ToState + def get_ToZip5(self): + return self.ToZip5 + def set_ToZip5(self, ToZip5): + self.ToZip5 = ToZip5 + def get_ToZip4(self): + return self.ToZip4 + def set_ToZip4(self, ToZip4): + self.ToZip4 = ToZip4 + def get_ToPhone(self): + return self.ToPhone + def set_ToPhone(self, ToPhone): + self.ToPhone = ToPhone + def get_POBox(self): + return self.POBox + def set_POBox(self, POBox): + self.POBox = POBox + def get_ToContactPreference(self): + return self.ToContactPreference + def set_ToContactPreference(self, ToContactPreference): + self.ToContactPreference = ToContactPreference + def get_ToContactMessaging(self): + return self.ToContactMessaging + def set_ToContactMessaging(self, ToContactMessaging): + self.ToContactMessaging = ToContactMessaging + def get_ToContactEmail(self): + return self.ToContactEmail + def set_ToContactEmail(self, ToContactEmail): + self.ToContactEmail = ToContactEmail + def get_AllowNonCleansedDestAddr(self): + return self.AllowNonCleansedDestAddr + def set_AllowNonCleansedDestAddr(self, AllowNonCleansedDestAddr): + self.AllowNonCleansedDestAddr = AllowNonCleansedDestAddr + def get_WeightInOunces(self): + return self.WeightInOunces + def set_WeightInOunces(self, WeightInOunces): + self.WeightInOunces = WeightInOunces + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_ProcessingCategory(self): + return self.ProcessingCategory + def set_ProcessingCategory(self, ProcessingCategory): + self.ProcessingCategory = ProcessingCategory + def get_PriceOptions(self): + return self.PriceOptions + def set_PriceOptions(self, PriceOptions): + self.PriceOptions = PriceOptions + def get_InsuredAmount(self): + return self.InsuredAmount + def set_InsuredAmount(self, InsuredAmount): + self.InsuredAmount = InsuredAmount + def get_AddressServiceRequested(self): + return self.AddressServiceRequested + def set_AddressServiceRequested(self, AddressServiceRequested): + self.AddressServiceRequested = AddressServiceRequested + def get_ExpressMailOptions(self): + return self.ExpressMailOptions + def set_ExpressMailOptions(self, ExpressMailOptions): + self.ExpressMailOptions = ExpressMailOptions + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_CustomerRefNo2(self): + return self.CustomerRefNo2 + def set_CustomerRefNo2(self, CustomerRefNo2): + self.CustomerRefNo2 = CustomerRefNo2 + def get_ExtraServices(self): + return self.ExtraServices + def 
set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_HoldForPickup(self): + return self.HoldForPickup + def set_HoldForPickup(self, HoldForPickup): + self.HoldForPickup = HoldForPickup + def get_OpenDistribute(self): + return self.OpenDistribute + def set_OpenDistribute(self, OpenDistribute): + self.OpenDistribute = OpenDistribute + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_PermitZIPCode(self): + return self.PermitZIPCode + def set_PermitZIPCode(self, PermitZIPCode): + self.PermitZIPCode = PermitZIPCode + def get_PermitHolderName(self): + return self.PermitHolderName + def set_PermitHolderName(self, PermitHolderName): + self.PermitHolderName = PermitHolderName + def get_CRID(self): + return self.CRID + def set_CRID(self, CRID): + self.CRID = CRID + def get_MID(self): + return self.MID + def set_MID(self, MID): + self.MID = MID + def get_LogisticsManagerMID(self): + return self.LogisticsManagerMID + def set_LogisticsManagerMID(self, LogisticsManagerMID): + self.LogisticsManagerMID = LogisticsManagerMID + def get_VendorCode(self): + return self.VendorCode + def set_VendorCode(self, VendorCode): + self.VendorCode = VendorCode + def get_VendorProductVersionNumber(self): + return self.VendorProductVersionNumber + def set_VendorProductVersionNumber(self, VendorProductVersionNumber): + self.VendorProductVersionNumber = VendorProductVersionNumber + def get_SenderName(self): + return self.SenderName + def set_SenderName(self, SenderName): + self.SenderName = SenderName + def get_SenderEMail(self): + return self.SenderEMail + def set_SenderEMail(self, SenderEMail): + self.SenderEMail = SenderEMail + def get_RecipientName(self): + return self.RecipientName + def set_RecipientName(self, RecipientName): + self.RecipientName = RecipientName + def get_RecipientEMail(self): + return self.RecipientEMail + def set_RecipientEMail(self, RecipientEMail): + self.RecipientEMail = RecipientEMail + def get_ReceiptOption(self): + return self.ReceiptOption + def set_ReceiptOption(self, ReceiptOption): + self.ReceiptOption = ReceiptOption + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_HoldForManifest(self): + return self.HoldForManifest + def set_HoldForManifest(self, HoldForManifest): + self.HoldForManifest = HoldForManifest + def get_NineDigitRoutingZip(self): + return self.NineDigitRoutingZip + def set_NineDigitRoutingZip(self, NineDigitRoutingZip): + self.NineDigitRoutingZip = NineDigitRoutingZip + def get_ShipInfo(self): + return self.ShipInfo + def set_ShipInfo(self, ShipInfo): + self.ShipInfo = ShipInfo + def get_CarrierRelease(self): + return self.CarrierRelease + def set_CarrierRelease(self, CarrierRelease): + self.CarrierRelease = CarrierRelease + def get_DropOffTime(self): + return self.DropOffTime + def set_DropOffTime(self, DropOffTime): + self.DropOffTime = DropOffTime + def get_ReturnCommitments(self): + return self.ReturnCommitments + def set_ReturnCommitments(self, ReturnCommitments): + self.ReturnCommitments = ReturnCommitments + def get_PrintCustomerRefNo(self): + return self.PrintCustomerRefNo + def set_PrintCustomerRefNo(self, PrintCustomerRefNo): + self.PrintCustomerRefNo = PrintCustomerRefNo + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, 
ActionCode): + self.ActionCode = ActionCode + def get_OptOutOfSPE(self): + return self.OptOutOfSPE + def set_OptOutOfSPE(self, OptOutOfSPE): + self.OptOutOfSPE = OptOutOfSPE + def get_SortationLevel(self): + return self.SortationLevel + def set_SortationLevel(self, SortationLevel): + self.SortationLevel = SortationLevel + def get_DestinationEntryFacilityType(self): + return self.DestinationEntryFacilityType + def set_DestinationEntryFacilityType(self, DestinationEntryFacilityType): + self.DestinationEntryFacilityType = DestinationEntryFacilityType + def get_ShippingContents(self): + return self.ShippingContents + def set_ShippingContents(self, ShippingContents): + self.ShippingContents = ShippingContents + def get_CustomsContentType(self): + return self.CustomsContentType + def set_CustomsContentType(self, CustomsContentType): + self.CustomsContentType = CustomsContentType + def get_ContentComments(self): + return self.ContentComments + def set_ContentComments(self, ContentComments): + self.ContentComments = ContentComments + def get_RestrictionType(self): + return self.RestrictionType + def set_RestrictionType(self, RestrictionType): + self.RestrictionType = RestrictionType + def get_RestrictionComments(self): + return self.RestrictionComments + def set_RestrictionComments(self, RestrictionComments): + self.RestrictionComments = RestrictionComments + def get_AESITN(self): + return self.AESITN + def set_AESITN(self, AESITN): + self.AESITN = AESITN + def get_ImportersReference(self): + return self.ImportersReference + def set_ImportersReference(self, ImportersReference): + self.ImportersReference = ImportersReference + def get_ImportersContact(self): + return self.ImportersContact + def set_ImportersContact(self, ImportersContact): + self.ImportersContact = ImportersContact + def get_ExportersReference(self): + return self.ExportersReference + def set_ExportersReference(self, ExportersReference): + self.ExportersReference = ExportersReference + def get_ExportersContact(self): + return self.ExportersContact + def set_ExportersContact(self, ExportersContact): + self.ExportersContact = ExportersContact + def get_InvoiceNumber(self): + return self.InvoiceNumber + def set_InvoiceNumber(self, InvoiceNumber): + self.InvoiceNumber = InvoiceNumber + def get_LicenseNumber(self): + return self.LicenseNumber + def set_LicenseNumber(self, LicenseNumber): + self.LicenseNumber = LicenseNumber + def get_CertificateNumber(self): + return self.CertificateNumber + def set_CertificateNumber(self, CertificateNumber): + self.CertificateNumber = CertificateNumber + def get_NonDeliveryOption(self): + return self.NonDeliveryOption + def set_NonDeliveryOption(self, NonDeliveryOption): + self.NonDeliveryOption = NonDeliveryOption + def get_AltReturnAddress1(self): + return self.AltReturnAddress1 + def set_AltReturnAddress1(self, AltReturnAddress1): + self.AltReturnAddress1 = AltReturnAddress1 + def get_AltReturnAddress2(self): + return self.AltReturnAddress2 + def set_AltReturnAddress2(self, AltReturnAddress2): + self.AltReturnAddress2 = AltReturnAddress2 + def get_AltReturnAddress3(self): + return self.AltReturnAddress3 + def set_AltReturnAddress3(self, AltReturnAddress3): + self.AltReturnAddress3 = AltReturnAddress3 + def get_AltReturnAddress4(self): + return self.AltReturnAddress4 + def set_AltReturnAddress4(self, AltReturnAddress4): + self.AltReturnAddress4 = AltReturnAddress4 + def get_AltReturnAddress5(self): + return self.AltReturnAddress5 + def set_AltReturnAddress5(self, AltReturnAddress5): + 
self.AltReturnAddress5 = AltReturnAddress5 + def get_AltReturnAddress6(self): + return self.AltReturnAddress6 + def set_AltReturnAddress6(self, AltReturnAddress6): + self.AltReturnAddress6 = AltReturnAddress6 + def get_AltReturnCountry(self): + return self.AltReturnCountry + def set_AltReturnCountry(self, AltReturnCountry): + self.AltReturnCountry = AltReturnCountry + def get_LabelImportType(self): + return self.LabelImportType + def set_LabelImportType(self, LabelImportType): + self.LabelImportType = LabelImportType + def get_ePostageMailerReporting(self): + return self.ePostageMailerReporting + def set_ePostageMailerReporting(self, ePostageMailerReporting): + self.ePostageMailerReporting = ePostageMailerReporting + def get_SenderFirstName(self): + return self.SenderFirstName + def set_SenderFirstName(self, SenderFirstName): + self.SenderFirstName = SenderFirstName + def get_SenderLastName(self): + return self.SenderLastName + def set_SenderLastName(self, SenderLastName): + self.SenderLastName = SenderLastName + def get_SenderBusinessName(self): + return self.SenderBusinessName + def set_SenderBusinessName(self, SenderBusinessName): + self.SenderBusinessName = SenderBusinessName + def get_SenderAddress1(self): + return self.SenderAddress1 + def set_SenderAddress1(self, SenderAddress1): + self.SenderAddress1 = SenderAddress1 + def get_SenderCity(self): + return self.SenderCity + def set_SenderCity(self, SenderCity): + self.SenderCity = SenderCity + def get_SenderState(self): + return self.SenderState + def set_SenderState(self, SenderState): + self.SenderState = SenderState + def get_SenderZip5(self): + return self.SenderZip5 + def set_SenderZip5(self, SenderZip5): + self.SenderZip5 = SenderZip5 + def get_SenderPhone(self): + return self.SenderPhone + def set_SenderPhone(self, SenderPhone): + self.SenderPhone = SenderPhone + def get_ChargebackCode(self): + return self.ChargebackCode + def set_ChargebackCode(self, ChargebackCode): + self.ChargebackCode = ChargebackCode + def get_TrackingRetentionPeriod(self): + return self.TrackingRetentionPeriod + def set_TrackingRetentionPeriod(self, TrackingRetentionPeriod): + self.TrackingRetentionPeriod = TrackingRetentionPeriod + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def validate_ShipDateType(self, value): + result = True + # Validate type ShipDateType, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + if not self.gds_validate_simple_patterns( + self.validate_ShipDateType_patterns_, value): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_ShipDateType_patterns_, )) + result = False + return result + validate_ShipDateType_patterns_ = [['^(\\d{1,2}/ \\d{1,2}/ \\d\\d(\\d\\d)?)$']] + def has__content(self): + if ( + self.Option is not None or + self.Revision is not None or + self.ImageParameters is not None or + self.FromName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.FromPhone is not None or + self.POZipCode is not None or + self.AllowNonCleansedOriginAddr is not None or + self.ToName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToCity is not None or + self.ToState is not None or + self.ToZip5 is not None or + self.ToZip4 is not None or + self.ToPhone is not None or + self.POBox is not None or + self.ToContactPreference is not None or + self.ToContactMessaging is not None or + self.ToContactEmail is not None or + self.AllowNonCleansedDestAddr is not None or + self.WeightInOunces is not None or + self.ServiceType is not None or + self.Container is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Machinable is not None or + self.ProcessingCategory is not None or + self.PriceOptions is not None or + self.InsuredAmount is not None or + self.AddressServiceRequested is not None or + self.ExpressMailOptions is not None or + self.ShipDate is not None or + self.CustomerRefNo is not None or + self.CustomerRefNo2 is not None or + self.ExtraServices is not None or + self.HoldForPickup is not None or + self.OpenDistribute is not None or + self.PermitNumber is not None or + self.PermitZIPCode is not None or + self.PermitHolderName is not None or + self.CRID is not None or + self.MID is not None or + self.LogisticsManagerMID is not None or + self.VendorCode is not None or + self.VendorProductVersionNumber is not None or + self.SenderName is not None or + self.SenderEMail is not None or + self.RecipientName is not None or + self.RecipientEMail is not None or + self.ReceiptOption is not None or + self.ImageType is not None or + self.HoldForManifest is not None or + self.NineDigitRoutingZip is not None or + self.ShipInfo is not None or + self.CarrierRelease is not None or + self.DropOffTime is not None or + self.ReturnCommitments is not None or + self.PrintCustomerRefNo is not None or + self.Content is not None or + self.ActionCode is not None or + self.OptOutOfSPE is not None or + self.SortationLevel is not None or + self.DestinationEntryFacilityType is not None or + self.ShippingContents is not None or + self.CustomsContentType is not None or + self.ContentComments is not None or + self.RestrictionType is not None or + self.RestrictionComments is not None or + self.AESITN is not None or + self.ImportersReference is not None or + self.ImportersContact is 
not None or + self.ExportersReference is not None or + self.ExportersContact is not None or + self.InvoiceNumber is not None or + self.LicenseNumber is not None or + self.CertificateNumber is not None or + self.NonDeliveryOption is not None or + self.AltReturnAddress1 is not None or + self.AltReturnAddress2 is not None or + self.AltReturnAddress3 is not None or + self.AltReturnAddress4 is not None or + self.AltReturnAddress5 is not None or + self.AltReturnAddress6 is not None or + self.AltReturnCountry is not None or + self.LabelImportType is not None or + self.ePostageMailerReporting is not None or + self.SenderFirstName is not None or + self.SenderLastName is not None or + self.SenderBusinessName is not None or + self.SenderAddress1 is not None or + self.SenderCity is not None or + self.SenderState is not None or + self.SenderZip5 is not None or + self.SenderPhone is not None or + self.ChargebackCode is not None or + self.TrackingRetentionPeriod is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Option, input_name='Option'), namespaceprefix_ , eol_)) + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if 
self.ImageParameters is not None: + namespaceprefix_ = self.ImageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameters_nsprefix_) else '' + self.ImageParameters.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageParameters', pretty_print=pretty_print) + if self.FromName is not None: + namespaceprefix_ = self.FromName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromName), input_name='FromName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip5), input_name='FromZip5')), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromZip4), input_name='FromZip4')), namespaceprefix_ , eol_)) + if self.FromPhone is not None: + namespaceprefix_ = self.FromPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.FromPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromPhone), input_name='FromPhone')), namespaceprefix_ , eol_)) + if 
self.POZipCode is not None: + namespaceprefix_ = self.POZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.POZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POZipCode), input_name='POZipCode')), namespaceprefix_ , eol_)) + if self.AllowNonCleansedOriginAddr is not None: + namespaceprefix_ = self.AllowNonCleansedOriginAddr_nsprefix_ + ':' if (UseCapturedNS_ and self.AllowNonCleansedOriginAddr_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAllowNonCleansedOriginAddr>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AllowNonCleansedOriginAddr, input_name='AllowNonCleansedOriginAddr'), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToState is not None: + namespaceprefix_ = self.ToState_nsprefix_ + ':' if (UseCapturedNS_ and self.ToState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToState), input_name='ToState')), namespaceprefix_ , eol_)) + if self.ToZip5 is not None: + namespaceprefix_ = self.ToZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToZip5), input_name='ToZip5')), namespaceprefix_ , eol_)) + if self.ToZip4 is not None: + namespaceprefix_ = self.ToZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToZip4), 
input_name='ToZip4')), namespaceprefix_ , eol_)) + if self.ToPhone is not None: + namespaceprefix_ = self.ToPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.ToPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToPhone), input_name='ToPhone')), namespaceprefix_ , eol_)) + if self.POBox is not None: + namespaceprefix_ = self.POBox_nsprefix_ + ':' if (UseCapturedNS_ and self.POBox_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOBox>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POBox), input_name='POBox')), namespaceprefix_ , eol_)) + if self.ToContactPreference is not None: + namespaceprefix_ = self.ToContactPreference_nsprefix_ + ':' if (UseCapturedNS_ and self.ToContactPreference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToContactPreference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToContactPreference), input_name='ToContactPreference')), namespaceprefix_ , eol_)) + if self.ToContactMessaging is not None: + namespaceprefix_ = self.ToContactMessaging_nsprefix_ + ':' if (UseCapturedNS_ and self.ToContactMessaging_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToContactMessaging>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToContactMessaging), input_name='ToContactMessaging')), namespaceprefix_ , eol_)) + if self.ToContactEmail is not None: + namespaceprefix_ = self.ToContactEmail_nsprefix_ + ':' if (UseCapturedNS_ and self.ToContactEmail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToContactEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToContactEmail), input_name='ToContactEmail')), namespaceprefix_ , eol_)) + if self.AllowNonCleansedDestAddr is not None: + namespaceprefix_ = self.AllowNonCleansedDestAddr_nsprefix_ + ':' if (UseCapturedNS_ and self.AllowNonCleansedDestAddr_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAllowNonCleansedDestAddr>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AllowNonCleansedDestAddr, input_name='AllowNonCleansedDestAddr'), namespaceprefix_ , eol_)) + if self.WeightInOunces is not None: + namespaceprefix_ = self.WeightInOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInOunces>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.WeightInOunces), input_name='WeightInOunces')), namespaceprefix_ , eol_)) + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Width is not None: + 
namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Machinable), input_name='Machinable')), namespaceprefix_ , eol_)) + if self.ProcessingCategory is not None: + namespaceprefix_ = self.ProcessingCategory_nsprefix_ + ':' if (UseCapturedNS_ and self.ProcessingCategory_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProcessingCategory>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ProcessingCategory), input_name='ProcessingCategory')), namespaceprefix_ , eol_)) + if self.PriceOptions is not None: + namespaceprefix_ = self.PriceOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOptions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOptions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PriceOptions), input_name='PriceOptions')), namespaceprefix_ , eol_)) + if self.InsuredAmount is not None: + namespaceprefix_ = self.InsuredAmount_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuredAmount_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuredAmount>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.InsuredAmount, input_name='InsuredAmount'), namespaceprefix_ , eol_)) + if self.AddressServiceRequested is not None: + namespaceprefix_ = self.AddressServiceRequested_nsprefix_ + ':' if (UseCapturedNS_ and self.AddressServiceRequested_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddressServiceRequested>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AddressServiceRequested, input_name='AddressServiceRequested'), namespaceprefix_ , eol_)) + if self.ExpressMailOptions is not None: + namespaceprefix_ = self.ExpressMailOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpressMailOptions_nsprefix_) else '' + self.ExpressMailOptions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExpressMailOptions', pretty_print=pretty_print) + if self.ShipDate is not None: + namespaceprefix_ = self.ShipDate_nsprefix_ + ':' if 
(UseCapturedNS_ and self.ShipDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ShipDate), input_name='ShipDate')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + if self.CustomerRefNo2 is not None: + namespaceprefix_ = self.CustomerRefNo2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo2), input_name='CustomerRefNo2')), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.HoldForPickup is not None: + namespaceprefix_ = self.HoldForPickup_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForPickup_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForPickup>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForPickup), input_name='HoldForPickup')), namespaceprefix_ , eol_)) + if self.OpenDistribute is not None: + namespaceprefix_ = self.OpenDistribute_nsprefix_ + ':' if (UseCapturedNS_ and self.OpenDistribute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOpenDistribute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OpenDistribute), input_name='OpenDistribute')), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitNumber), input_name='PermitNumber')), namespaceprefix_ , eol_)) + if self.PermitZIPCode is not None: + namespaceprefix_ = self.PermitZIPCode_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitZIPCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitZIPCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitZIPCode), input_name='PermitZIPCode')), namespaceprefix_ , eol_)) + if self.PermitHolderName is not None: + namespaceprefix_ = self.PermitHolderName_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitHolderName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitHolderName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitHolderName), input_name='PermitHolderName')), namespaceprefix_ , eol_)) + if self.CRID is not None: + namespaceprefix_ = self.CRID_nsprefix_ + ':' if (UseCapturedNS_ and self.CRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCRID>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.CRID), input_name='CRID')), namespaceprefix_ , eol_)) + if self.MID is not None: + namespaceprefix_ = self.MID_nsprefix_ + ':' if (UseCapturedNS_ and self.MID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MID), input_name='MID')), namespaceprefix_ , eol_)) + if self.LogisticsManagerMID is not None: + namespaceprefix_ = self.LogisticsManagerMID_nsprefix_ + ':' if (UseCapturedNS_ and self.LogisticsManagerMID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogisticsManagerMID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogisticsManagerMID), input_name='LogisticsManagerMID')), namespaceprefix_ , eol_)) + if self.VendorCode is not None: + namespaceprefix_ = self.VendorCode_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorCode), input_name='VendorCode')), namespaceprefix_ , eol_)) + if self.VendorProductVersionNumber is not None: + namespaceprefix_ = self.VendorProductVersionNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.VendorProductVersionNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sVendorProductVersionNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.VendorProductVersionNumber), input_name='VendorProductVersionNumber')), namespaceprefix_ , eol_)) + if self.SenderName is not None: + namespaceprefix_ = self.SenderName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderName), input_name='SenderName')), namespaceprefix_ , eol_)) + if self.SenderEMail is not None: + namespaceprefix_ = self.SenderEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderEMail), input_name='SenderEMail')), namespaceprefix_ , eol_)) + if self.RecipientName is not None: + namespaceprefix_ = self.RecipientName_nsprefix_ + ':' if (UseCapturedNS_ and self.RecipientName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRecipientName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RecipientName), input_name='RecipientName')), namespaceprefix_ , eol_)) + if self.RecipientEMail is not None: + namespaceprefix_ = self.RecipientEMail_nsprefix_ + ':' if (UseCapturedNS_ and self.RecipientEMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRecipientEMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RecipientEMail), input_name='RecipientEMail')), namespaceprefix_ , eol_)) + if self.ReceiptOption is not None: + namespaceprefix_ = self.ReceiptOption_nsprefix_ + ':' if (UseCapturedNS_ and self.ReceiptOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReceiptOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReceiptOption), 
input_name='ReceiptOption')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.HoldForManifest is not None: + namespaceprefix_ = self.HoldForManifest_nsprefix_ + ':' if (UseCapturedNS_ and self.HoldForManifest_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHoldForManifest>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HoldForManifest), input_name='HoldForManifest')), namespaceprefix_ , eol_)) + if self.NineDigitRoutingZip is not None: + namespaceprefix_ = self.NineDigitRoutingZip_nsprefix_ + ':' if (UseCapturedNS_ and self.NineDigitRoutingZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNineDigitRoutingZip>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.NineDigitRoutingZip, input_name='NineDigitRoutingZip'), namespaceprefix_ , eol_)) + if self.ShipInfo is not None: + namespaceprefix_ = self.ShipInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sShipInfo>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ShipInfo, input_name='ShipInfo'), namespaceprefix_ , eol_)) + if self.CarrierRelease is not None: + namespaceprefix_ = self.CarrierRelease_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRelease_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRelease>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.CarrierRelease, input_name='CarrierRelease'), namespaceprefix_ , eol_)) + if self.DropOffTime is not None: + namespaceprefix_ = self.DropOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.DropOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDropOffTime>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DropOffTime, input_name='DropOffTime'), namespaceprefix_ , eol_)) + if self.ReturnCommitments is not None: + namespaceprefix_ = self.ReturnCommitments_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCommitments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnCommitments>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ReturnCommitments, input_name='ReturnCommitments'), namespaceprefix_ , eol_)) + if self.PrintCustomerRefNo is not None: + namespaceprefix_ = self.PrintCustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.PrintCustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrintCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PrintCustomerRefNo), input_name='PrintCustomerRefNo')), namespaceprefix_ , eol_)) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.OptOutOfSPE is not None: + namespaceprefix_ = self.OptOutOfSPE_nsprefix_ + ':' if (UseCapturedNS_ and self.OptOutOfSPE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOptOutOfSPE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OptOutOfSPE), input_name='OptOutOfSPE')), namespaceprefix_ , eol_)) + if self.SortationLevel is not None: + namespaceprefix_ = self.SortationLevel_nsprefix_ + ':' if (UseCapturedNS_ and self.SortationLevel_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSortationLevel>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SortationLevel), input_name='SortationLevel')), namespaceprefix_ , eol_)) + if self.DestinationEntryFacilityType is not None: + namespaceprefix_ = self.DestinationEntryFacilityType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationEntryFacilityType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationEntryFacilityType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationEntryFacilityType), input_name='DestinationEntryFacilityType')), namespaceprefix_ , eol_)) + if self.ShippingContents is not None: + namespaceprefix_ = self.ShippingContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingContents_nsprefix_) else '' + self.ShippingContents.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingContents', pretty_print=pretty_print) + if self.CustomsContentType is not None: + namespaceprefix_ = self.CustomsContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomsContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomsContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomsContentType), input_name='CustomsContentType')), namespaceprefix_ , eol_)) + if self.ContentComments is not None: + namespaceprefix_ = self.ContentComments_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentComments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentComments), input_name='ContentComments')), namespaceprefix_ , eol_)) + if self.RestrictionType is not None: + namespaceprefix_ = self.RestrictionType_nsprefix_ + ':' if (UseCapturedNS_ and self.RestrictionType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictionType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RestrictionType), input_name='RestrictionType')), namespaceprefix_ , eol_)) + if self.RestrictionComments is not None: + namespaceprefix_ = self.RestrictionComments_nsprefix_ + ':' if (UseCapturedNS_ and self.RestrictionComments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictionComments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RestrictionComments), input_name='RestrictionComments')), namespaceprefix_ , eol_)) + if self.AESITN is not None: + namespaceprefix_ = self.AESITN_nsprefix_ + ':' if (UseCapturedNS_ and self.AESITN_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAESITN>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.AESITN), input_name='AESITN')), namespaceprefix_ , eol_)) + if self.ImportersReference is not None: + namespaceprefix_ = self.ImportersReference_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersReference), input_name='ImportersReference')), namespaceprefix_ , eol_)) + if self.ImportersContact is not None: + namespaceprefix_ = self.ImportersContact_nsprefix_ + ':' if (UseCapturedNS_ and self.ImportersContact_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImportersContact>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImportersContact), input_name='ImportersContact')), namespaceprefix_ , eol_)) + if self.ExportersReference is not None: + namespaceprefix_ = self.ExportersReference_nsprefix_ + ':' if (UseCapturedNS_ and self.ExportersReference_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExportersReference>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExportersReference), input_name='ExportersReference')), namespaceprefix_ , eol_)) + if self.ExportersContact is not None: + namespaceprefix_ = self.ExportersContact_nsprefix_ + ':' if (UseCapturedNS_ and self.ExportersContact_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExportersContact>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExportersContact), input_name='ExportersContact')), namespaceprefix_ , eol_)) + if self.InvoiceNumber is not None: + namespaceprefix_ = self.InvoiceNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.InvoiceNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInvoiceNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InvoiceNumber), input_name='InvoiceNumber')), namespaceprefix_ , eol_)) + if self.LicenseNumber is not None: + namespaceprefix_ = self.LicenseNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.LicenseNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLicenseNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LicenseNumber), input_name='LicenseNumber')), namespaceprefix_ , eol_)) + if self.CertificateNumber is not None: + namespaceprefix_ = self.CertificateNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.CertificateNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCertificateNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CertificateNumber), input_name='CertificateNumber')), namespaceprefix_ , eol_)) + if self.NonDeliveryOption is not None: + namespaceprefix_ = self.NonDeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonDeliveryOption), input_name='NonDeliveryOption')), namespaceprefix_ , eol_)) + if self.AltReturnAddress1 is not None: + namespaceprefix_ = self.AltReturnAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sAltReturnAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress1), input_name='AltReturnAddress1')), namespaceprefix_ , eol_)) + if self.AltReturnAddress2 is not None: + namespaceprefix_ = self.AltReturnAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress2), input_name='AltReturnAddress2')), namespaceprefix_ , eol_)) + if self.AltReturnAddress3 is not None: + namespaceprefix_ = self.AltReturnAddress3_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress3), input_name='AltReturnAddress3')), namespaceprefix_ , eol_)) + if self.AltReturnAddress4 is not None: + namespaceprefix_ = self.AltReturnAddress4_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress4), input_name='AltReturnAddress4')), namespaceprefix_ , eol_)) + if self.AltReturnAddress5 is not None: + namespaceprefix_ = self.AltReturnAddress5_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress5), input_name='AltReturnAddress5')), namespaceprefix_ , eol_)) + if self.AltReturnAddress6 is not None: + namespaceprefix_ = self.AltReturnAddress6_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnAddress6_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnAddress6>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnAddress6), input_name='AltReturnAddress6')), namespaceprefix_ , eol_)) + if self.AltReturnCountry is not None: + namespaceprefix_ = self.AltReturnCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.AltReturnCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAltReturnCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AltReturnCountry), input_name='AltReturnCountry')), namespaceprefix_ , eol_)) + if self.LabelImportType is not None: + namespaceprefix_ = self.LabelImportType_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImportType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImportType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImportType), input_name='LabelImportType')), namespaceprefix_ , eol_)) + if self.ePostageMailerReporting is not None: + namespaceprefix_ = self.ePostageMailerReporting_nsprefix_ + ':' if (UseCapturedNS_ and self.ePostageMailerReporting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sePostageMailerReporting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ePostageMailerReporting), input_name='ePostageMailerReporting')), namespaceprefix_ , eol_)) + if self.SenderFirstName is not None: + namespaceprefix_ = 
self.SenderFirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderFirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderFirstName), input_name='SenderFirstName')), namespaceprefix_ , eol_)) + if self.SenderLastName is not None: + namespaceprefix_ = self.SenderLastName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderLastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderLastName), input_name='SenderLastName')), namespaceprefix_ , eol_)) + if self.SenderBusinessName is not None: + namespaceprefix_ = self.SenderBusinessName_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderBusinessName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderBusinessName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderBusinessName), input_name='SenderBusinessName')), namespaceprefix_ , eol_)) + if self.SenderAddress1 is not None: + namespaceprefix_ = self.SenderAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderAddress1), input_name='SenderAddress1')), namespaceprefix_ , eol_)) + if self.SenderCity is not None: + namespaceprefix_ = self.SenderCity_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderCity), input_name='SenderCity')), namespaceprefix_ , eol_)) + if self.SenderState is not None: + namespaceprefix_ = self.SenderState_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderState), input_name='SenderState')), namespaceprefix_ , eol_)) + if self.SenderZip5 is not None: + namespaceprefix_ = self.SenderZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderZip5>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderZip5), input_name='SenderZip5')), namespaceprefix_ , eol_)) + if self.SenderPhone is not None: + namespaceprefix_ = self.SenderPhone_nsprefix_ + ':' if (UseCapturedNS_ and self.SenderPhone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSenderPhone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SenderPhone), input_name='SenderPhone')), namespaceprefix_ , eol_)) + if self.ChargebackCode is not None: + namespaceprefix_ = self.ChargebackCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ChargebackCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sChargebackCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChargebackCode), input_name='ChargebackCode')), namespaceprefix_ , eol_)) + if self.TrackingRetentionPeriod is not None: + namespaceprefix_ = self.TrackingRetentionPeriod_nsprefix_ + ':' if (UseCapturedNS_ and 
self.TrackingRetentionPeriod_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTrackingRetentionPeriod>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackingRetentionPeriod), input_name='TrackingRetentionPeriod')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Option') + ival_ = self.gds_validate_integer(ival_, node, 'Option') + self.Option = ival_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageParameters': + obj_ = ImageParametersType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ImageParameters = obj_ + obj_.original_tagname_ = 'ImageParameters' + elif nodeName_ == 'FromName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromName') + value_ = self.gds_validate_string(value_, node, 'FromName') + self.FromName = value_ + self.FromName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'FromZip5') + value_ = self.gds_validate_string(value_, node, 'FromZip5') + self.FromZip5 = value_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromZip4') + value_ = self.gds_validate_string(value_, node, 'FromZip4') + self.FromZip4 = value_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'FromPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromPhone') + value_ = self.gds_validate_string(value_, node, 'FromPhone') + self.FromPhone = value_ + self.FromPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'POZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POZipCode') + value_ = self.gds_validate_string(value_, node, 'POZipCode') + self.POZipCode = value_ + self.POZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'AllowNonCleansedOriginAddr': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AllowNonCleansedOriginAddr') + ival_ = self.gds_validate_boolean(ival_, node, 'AllowNonCleansedOriginAddr') + self.AllowNonCleansedOriginAddr = ival_ + self.AllowNonCleansedOriginAddr_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToState') + value_ = self.gds_validate_string(value_, node, 'ToState') + self.ToState = value_ + self.ToState_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToZip5') + value_ = self.gds_validate_string(value_, node, 'ToZip5') + self.ToZip5 = value_ + self.ToZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToZip4') + value_ = self.gds_validate_string(value_, node, 'ToZip4') + self.ToZip4 = value_ + self.ToZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'ToPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToPhone') + value_ = self.gds_validate_string(value_, node, 'ToPhone') + self.ToPhone = value_ + self.ToPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'POBox': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POBox') + value_ = self.gds_validate_string(value_, node, 'POBox') + self.POBox = value_ + 
self.POBox_nsprefix_ = child_.prefix + elif nodeName_ == 'ToContactPreference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToContactPreference') + value_ = self.gds_validate_string(value_, node, 'ToContactPreference') + self.ToContactPreference = value_ + self.ToContactPreference_nsprefix_ = child_.prefix + elif nodeName_ == 'ToContactMessaging': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToContactMessaging') + value_ = self.gds_validate_string(value_, node, 'ToContactMessaging') + self.ToContactMessaging = value_ + self.ToContactMessaging_nsprefix_ = child_.prefix + elif nodeName_ == 'ToContactEmail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToContactEmail') + value_ = self.gds_validate_string(value_, node, 'ToContactEmail') + self.ToContactEmail = value_ + self.ToContactEmail_nsprefix_ = child_.prefix + elif nodeName_ == 'AllowNonCleansedDestAddr': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AllowNonCleansedDestAddr') + ival_ = self.gds_validate_boolean(ival_, node, 'AllowNonCleansedDestAddr') + self.AllowNonCleansedDestAddr = ival_ + self.AllowNonCleansedDestAddr_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInOunces': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'WeightInOunces') + value_ = self.gds_validate_string(value_, node, 'WeightInOunces') + self.WeightInOunces = value_ + self.WeightInOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Machinable') + value_ = self.gds_validate_string(value_, node, 'Machinable') + self.Machinable = value_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'ProcessingCategory': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ProcessingCategory') + value_ = self.gds_validate_string(value_, node, 'ProcessingCategory') + self.ProcessingCategory = value_ + self.ProcessingCategory_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOptions': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PriceOptions') + value_ = self.gds_validate_string(value_, node, 'PriceOptions') + self.PriceOptions = value_ + self.PriceOptions_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuredAmount' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'InsuredAmount') + fval_ = self.gds_validate_decimal(fval_, node, 'InsuredAmount') + self.InsuredAmount = fval_ + self.InsuredAmount_nsprefix_ = child_.prefix + elif nodeName_ == 'AddressServiceRequested': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AddressServiceRequested') + ival_ = self.gds_validate_boolean(ival_, node, 'AddressServiceRequested') + self.AddressServiceRequested = ival_ + self.AddressServiceRequested_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpressMailOptions': + obj_ = ExpressMailOptionsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExpressMailOptions = obj_ + obj_.original_tagname_ = 'ExpressMailOptions' + elif nodeName_ == 'ShipDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ShipDate') + value_ = self.gds_validate_string(value_, node, 'ShipDate') + self.ShipDate = value_ + self.ShipDate_nsprefix_ = child_.prefix + # validate type ShipDateType + self.validate_ShipDateType(self.ShipDate) + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo2') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo2') + self.CustomerRefNo2 = value_ + self.CustomerRefNo2_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'HoldForPickup': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForPickup') + value_ = self.gds_validate_string(value_, node, 'HoldForPickup') + self.HoldForPickup = value_ + self.HoldForPickup_nsprefix_ = child_.prefix + elif nodeName_ == 'OpenDistribute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OpenDistribute') + value_ = self.gds_validate_string(value_, node, 'OpenDistribute') + self.OpenDistribute = value_ + self.OpenDistribute_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitNumber') + value_ = self.gds_validate_string(value_, node, 'PermitNumber') + self.PermitNumber = value_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitZIPCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitZIPCode') + value_ = self.gds_validate_string(value_, node, 'PermitZIPCode') + self.PermitZIPCode = value_ + self.PermitZIPCode_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitHolderName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitHolderName') + value_ = self.gds_validate_string(value_, node, 'PermitHolderName') + self.PermitHolderName = value_ + self.PermitHolderName_nsprefix_ = child_.prefix + elif nodeName_ == 'CRID': + value_ = child_.text + value_ = 
self.gds_parse_string(value_, node, 'CRID') + value_ = self.gds_validate_string(value_, node, 'CRID') + self.CRID = value_ + self.CRID_nsprefix_ = child_.prefix + elif nodeName_ == 'MID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MID') + value_ = self.gds_validate_string(value_, node, 'MID') + self.MID = value_ + self.MID_nsprefix_ = child_.prefix + elif nodeName_ == 'LogisticsManagerMID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogisticsManagerMID') + value_ = self.gds_validate_string(value_, node, 'LogisticsManagerMID') + self.LogisticsManagerMID = value_ + self.LogisticsManagerMID_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorCode') + value_ = self.gds_validate_string(value_, node, 'VendorCode') + self.VendorCode = value_ + self.VendorCode_nsprefix_ = child_.prefix + elif nodeName_ == 'VendorProductVersionNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'VendorProductVersionNumber') + value_ = self.gds_validate_string(value_, node, 'VendorProductVersionNumber') + self.VendorProductVersionNumber = value_ + self.VendorProductVersionNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderName') + value_ = self.gds_validate_string(value_, node, 'SenderName') + self.SenderName = value_ + self.SenderName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderEMail') + value_ = self.gds_validate_string(value_, node, 'SenderEMail') + self.SenderEMail = value_ + self.SenderEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'RecipientName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RecipientName') + value_ = self.gds_validate_string(value_, node, 'RecipientName') + self.RecipientName = value_ + self.RecipientName_nsprefix_ = child_.prefix + elif nodeName_ == 'RecipientEMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RecipientEMail') + value_ = self.gds_validate_string(value_, node, 'RecipientEMail') + self.RecipientEMail = value_ + self.RecipientEMail_nsprefix_ = child_.prefix + elif nodeName_ == 'ReceiptOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReceiptOption') + value_ = self.gds_validate_string(value_, node, 'ReceiptOption') + self.ReceiptOption = value_ + self.ReceiptOption_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'HoldForManifest': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HoldForManifest') + value_ = self.gds_validate_string(value_, node, 'HoldForManifest') + self.HoldForManifest = value_ + self.HoldForManifest_nsprefix_ = child_.prefix + elif nodeName_ == 'NineDigitRoutingZip': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'NineDigitRoutingZip') + ival_ = self.gds_validate_boolean(ival_, node, 'NineDigitRoutingZip') + self.NineDigitRoutingZip = ival_ + self.NineDigitRoutingZip_nsprefix_ = child_.prefix + elif nodeName_ == 'ShipInfo': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ShipInfo') + ival_ = 
self.gds_validate_boolean(ival_, node, 'ShipInfo') + self.ShipInfo = ival_ + self.ShipInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRelease': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'CarrierRelease') + ival_ = self.gds_validate_boolean(ival_, node, 'CarrierRelease') + self.CarrierRelease = ival_ + self.CarrierRelease_nsprefix_ = child_.prefix + elif nodeName_ == 'DropOffTime': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DropOffTime') + ival_ = self.gds_validate_boolean(ival_, node, 'DropOffTime') + self.DropOffTime = ival_ + self.DropOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnCommitments': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ReturnCommitments') + ival_ = self.gds_validate_boolean(ival_, node, 'ReturnCommitments') + self.ReturnCommitments = ival_ + self.ReturnCommitments_nsprefix_ = child_.prefix + elif nodeName_ == 'PrintCustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PrintCustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'PrintCustomerRefNo') + self.PrintCustomerRefNo = value_ + self.PrintCustomerRefNo_nsprefix_ = child_.prefix + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + obj_.original_tagname_ = 'Content' + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OptOutOfSPE': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OptOutOfSPE') + value_ = self.gds_validate_string(value_, node, 'OptOutOfSPE') + self.OptOutOfSPE = value_ + self.OptOutOfSPE_nsprefix_ = child_.prefix + elif nodeName_ == 'SortationLevel': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SortationLevel') + value_ = self.gds_validate_string(value_, node, 'SortationLevel') + self.SortationLevel = value_ + self.SortationLevel_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationEntryFacilityType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationEntryFacilityType') + value_ = self.gds_validate_string(value_, node, 'DestinationEntryFacilityType') + self.DestinationEntryFacilityType = value_ + self.DestinationEntryFacilityType_nsprefix_ = child_.prefix + elif nodeName_ == 'ShippingContents': + obj_ = ShippingContentsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShippingContents = obj_ + obj_.original_tagname_ = 'ShippingContents' + elif nodeName_ == 'CustomsContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomsContentType') + value_ = self.gds_validate_string(value_, node, 'CustomsContentType') + self.CustomsContentType = value_ + self.CustomsContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentComments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentComments') + value_ = self.gds_validate_string(value_, node, 'ContentComments') + self.ContentComments = value_ + self.ContentComments_nsprefix_ = child_.prefix + elif nodeName_ == 'RestrictionType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RestrictionType') + value_ = self.gds_validate_string(value_, node, 'RestrictionType') + self.RestrictionType 
= value_ + self.RestrictionType_nsprefix_ = child_.prefix + elif nodeName_ == 'RestrictionComments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RestrictionComments') + value_ = self.gds_validate_string(value_, node, 'RestrictionComments') + self.RestrictionComments = value_ + self.RestrictionComments_nsprefix_ = child_.prefix + elif nodeName_ == 'AESITN': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AESITN') + value_ = self.gds_validate_string(value_, node, 'AESITN') + self.AESITN = value_ + self.AESITN_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersReference') + value_ = self.gds_validate_string(value_, node, 'ImportersReference') + self.ImportersReference = value_ + self.ImportersReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ImportersContact': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImportersContact') + value_ = self.gds_validate_string(value_, node, 'ImportersContact') + self.ImportersContact = value_ + self.ImportersContact_nsprefix_ = child_.prefix + elif nodeName_ == 'ExportersReference': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExportersReference') + value_ = self.gds_validate_string(value_, node, 'ExportersReference') + self.ExportersReference = value_ + self.ExportersReference_nsprefix_ = child_.prefix + elif nodeName_ == 'ExportersContact': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExportersContact') + value_ = self.gds_validate_string(value_, node, 'ExportersContact') + self.ExportersContact = value_ + self.ExportersContact_nsprefix_ = child_.prefix + elif nodeName_ == 'InvoiceNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InvoiceNumber') + value_ = self.gds_validate_string(value_, node, 'InvoiceNumber') + self.InvoiceNumber = value_ + self.InvoiceNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LicenseNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LicenseNumber') + value_ = self.gds_validate_string(value_, node, 'LicenseNumber') + self.LicenseNumber = value_ + self.LicenseNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CertificateNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CertificateNumber') + value_ = self.gds_validate_string(value_, node, 'CertificateNumber') + self.CertificateNumber = value_ + self.CertificateNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonDeliveryOption') + value_ = self.gds_validate_string(value_, node, 'NonDeliveryOption') + self.NonDeliveryOption = value_ + self.NonDeliveryOption_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress1') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress1') + self.AltReturnAddress1 = value_ + self.AltReturnAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress2') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress2') + self.AltReturnAddress2 = value_ + self.AltReturnAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'AltReturnAddress3') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress3') + self.AltReturnAddress3 = value_ + self.AltReturnAddress3_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress4') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress4') + self.AltReturnAddress4 = value_ + self.AltReturnAddress4_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress5') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress5') + self.AltReturnAddress5 = value_ + self.AltReturnAddress5_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnAddress6': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnAddress6') + value_ = self.gds_validate_string(value_, node, 'AltReturnAddress6') + self.AltReturnAddress6 = value_ + self.AltReturnAddress6_nsprefix_ = child_.prefix + elif nodeName_ == 'AltReturnCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AltReturnCountry') + value_ = self.gds_validate_string(value_, node, 'AltReturnCountry') + self.AltReturnCountry = value_ + self.AltReturnCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImportType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImportType') + value_ = self.gds_validate_string(value_, node, 'LabelImportType') + self.LabelImportType = value_ + self.LabelImportType_nsprefix_ = child_.prefix + elif nodeName_ == 'ePostageMailerReporting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ePostageMailerReporting') + value_ = self.gds_validate_string(value_, node, 'ePostageMailerReporting') + self.ePostageMailerReporting = value_ + self.ePostageMailerReporting_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderFirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderFirstName') + value_ = self.gds_validate_string(value_, node, 'SenderFirstName') + self.SenderFirstName = value_ + self.SenderFirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderLastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderLastName') + value_ = self.gds_validate_string(value_, node, 'SenderLastName') + self.SenderLastName = value_ + self.SenderLastName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderBusinessName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderBusinessName') + value_ = self.gds_validate_string(value_, node, 'SenderBusinessName') + self.SenderBusinessName = value_ + self.SenderBusinessName_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderAddress1') + value_ = self.gds_validate_string(value_, node, 'SenderAddress1') + self.SenderAddress1 = value_ + self.SenderAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderCity') + value_ = self.gds_validate_string(value_, node, 'SenderCity') + self.SenderCity = value_ + self.SenderCity_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderState') + value_ = self.gds_validate_string(value_, node, 'SenderState') + self.SenderState = value_ + self.SenderState_nsprefix_ = child_.prefix + elif 
nodeName_ == 'SenderZip5': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderZip5') + value_ = self.gds_validate_string(value_, node, 'SenderZip5') + self.SenderZip5 = value_ + self.SenderZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'SenderPhone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SenderPhone') + value_ = self.gds_validate_string(value_, node, 'SenderPhone') + self.SenderPhone = value_ + self.SenderPhone_nsprefix_ = child_.prefix + elif nodeName_ == 'ChargebackCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ChargebackCode') + value_ = self.gds_validate_string(value_, node, 'ChargebackCode') + self.ChargebackCode = value_ + self.ChargebackCode_nsprefix_ = child_.prefix + elif nodeName_ == 'TrackingRetentionPeriod': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackingRetentionPeriod') + value_ = self.gds_validate_string(value_, node, 'TrackingRetentionPeriod') + self.TrackingRetentionPeriod = value_ + self.TrackingRetentionPeriod_nsprefix_ = child_.prefix +# end class eVSRequest + + +class ImageParametersType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ImageParameter=None, XCoordinate=None, YCoordinate=None, LabelSequence=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ImageParameter = ImageParameter + self.ImageParameter_nsprefix_ = None + self.XCoordinate = XCoordinate + self.XCoordinate_nsprefix_ = None + self.YCoordinate = YCoordinate + self.YCoordinate_nsprefix_ = None + self.LabelSequence = LabelSequence + self.LabelSequence_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ImageParametersType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ImageParametersType.subclass: + return ImageParametersType.subclass(*args_, **kwargs_) + else: + return ImageParametersType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ImageParameter(self): + return self.ImageParameter + def set_ImageParameter(self, ImageParameter): + self.ImageParameter = ImageParameter + def get_XCoordinate(self): + return self.XCoordinate + def set_XCoordinate(self, XCoordinate): + self.XCoordinate = XCoordinate + def get_YCoordinate(self): + return self.YCoordinate + def set_YCoordinate(self, YCoordinate): + self.YCoordinate = YCoordinate + def get_LabelSequence(self): + return self.LabelSequence + def set_LabelSequence(self, LabelSequence): + self.LabelSequence = LabelSequence + def has__content(self): + if ( + self.ImageParameter is not None or + self.XCoordinate is not None or + self.YCoordinate is not None or + self.LabelSequence is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageParametersType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ImageParametersType': + name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageParametersType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageParametersType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageParametersType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ImageParametersType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ImageParameter is not None: + namespaceprefix_ = self.ImageParameter_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageParameter_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageParameter>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageParameter), input_name='ImageParameter')), namespaceprefix_ , eol_)) + if self.XCoordinate is not None: + namespaceprefix_ = self.XCoordinate_nsprefix_ + ':' if (UseCapturedNS_ and self.XCoordinate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sXCoordinate>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.XCoordinate, input_name='XCoordinate'), namespaceprefix_ , eol_)) + if self.YCoordinate is not None: + namespaceprefix_ = self.YCoordinate_nsprefix_ + ':' if (UseCapturedNS_ and self.YCoordinate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sYCoordinate>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.YCoordinate, input_name='YCoordinate'), namespaceprefix_ , eol_)) + if self.LabelSequence is not None: + namespaceprefix_ = self.LabelSequence_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelSequence_nsprefix_) else '' + self.LabelSequence.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LabelSequence', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ImageParameter': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageParameter') + value_ = self.gds_validate_string(value_, node, 'ImageParameter') + self.ImageParameter = value_ + self.ImageParameter_nsprefix_ = child_.prefix + elif nodeName_ == 'XCoordinate' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'XCoordinate') + ival_ = self.gds_validate_integer(ival_, node, 'XCoordinate') + self.XCoordinate = ival_ + self.XCoordinate_nsprefix_ = 
+            child_.prefix
+        elif nodeName_ == 'YCoordinate' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'YCoordinate')
+            ival_ = self.gds_validate_integer(ival_, node, 'YCoordinate')
+            self.YCoordinate = ival_
+            self.YCoordinate_nsprefix_ = child_.prefix
+        elif nodeName_ == 'LabelSequence':
+            obj_ = LabelSequenceType.factory(parent_object_=self)
+            obj_.build(child_, gds_collector_=gds_collector_)
+            self.LabelSequence = obj_
+            obj_.original_tagname_ = 'LabelSequence'
+# end class ImageParametersType
+
+
+class LabelSequenceType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, PackageNumber=None, TotalPackages=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        self.PackageNumber = PackageNumber
+        self.PackageNumber_nsprefix_ = None
+        self.TotalPackages = TotalPackages
+        self.TotalPackages_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, LabelSequenceType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if LabelSequenceType.subclass:
+            return LabelSequenceType.subclass(*args_, **kwargs_)
+        else:
+            return LabelSequenceType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_PackageNumber(self):
+        return self.PackageNumber
+    def set_PackageNumber(self, PackageNumber):
+        self.PackageNumber = PackageNumber
+    def get_TotalPackages(self):
+        return self.TotalPackages
+    def set_TotalPackages(self, TotalPackages):
+        self.TotalPackages = TotalPackages
+    def has__content(self):
+        if (
+            self.PackageNumber is not None or
+            self.TotalPackages is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LabelSequenceType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LabelSequenceType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'LabelSequenceType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LabelSequenceType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LabelSequenceType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LabelSequenceType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LabelSequenceType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.PackageNumber is not None:
+            namespaceprefix_ = self.PackageNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageNumber_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPackageNumber>%s</%sPackageNumber>%s' % (namespaceprefix_ , self.gds_format_integer(self.PackageNumber, input_name='PackageNumber'), namespaceprefix_ , eol_))
+        if self.TotalPackages is not None:
+            namespaceprefix_ = self.TotalPackages_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalPackages_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sTotalPackages>%s</%sTotalPackages>%s' % (namespaceprefix_ , self.gds_format_integer(self.TotalPackages, input_name='TotalPackages'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'PackageNumber' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'PackageNumber')
+            ival_ = self.gds_validate_integer(ival_, node, 'PackageNumber')
+            self.PackageNumber = ival_
+            self.PackageNumber_nsprefix_ = child_.prefix
+        elif nodeName_ == 'TotalPackages' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'TotalPackages')
+            ival_ = self.gds_validate_integer(ival_, node, 'TotalPackages')
+            self.TotalPackages = ival_
+            self.TotalPackages_nsprefix_ = child_.prefix
+# end class LabelSequenceType
+
+
+class ExpressMailOptionsType(GeneratedsSuper):
+    __hash__ = GeneratedsSuper.__hash__
+    subclass = None
+    superclass = None
+    def __init__(self, DeliveryOption=None, WaiverOfSignature=None, eSOFAllowed=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
+        self.original_tagname_ = None
+        self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
+        self.DeliveryOption = DeliveryOption
+        self.DeliveryOption_nsprefix_ = None
+        self.WaiverOfSignature = WaiverOfSignature
+        self.WaiverOfSignature_nsprefix_ = None
+        self.eSOFAllowed = eSOFAllowed
+        self.eSOFAllowed_nsprefix_ = None
+    def factory(*args_, **kwargs_):
+        if CurrentSubclassModule_ is not None:
+            subclass = getSubclassFromModule_(
+                CurrentSubclassModule_, ExpressMailOptionsType)
+            if subclass is not None:
+                return subclass(*args_, **kwargs_)
+        if ExpressMailOptionsType.subclass:
+            return ExpressMailOptionsType.subclass(*args_, **kwargs_)
+        else:
+            return ExpressMailOptionsType(*args_, **kwargs_)
+    factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_DeliveryOption(self):
+        return self.DeliveryOption
+    def set_DeliveryOption(self, DeliveryOption):
+        self.DeliveryOption = DeliveryOption
+    def get_WaiverOfSignature(self):
+        return self.WaiverOfSignature
+    def set_WaiverOfSignature(self, WaiverOfSignature):
+        self.WaiverOfSignature = WaiverOfSignature
+    def get_eSOFAllowed(self):
+        return self.eSOFAllowed
+    def set_eSOFAllowed(self, eSOFAllowed):
+        self.eSOFAllowed = eSOFAllowed
+    def has__content(self):
+        if (
+            self.DeliveryOption is
not None or + self.WaiverOfSignature is not None or + self.eSOFAllowed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailOptionsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpressMailOptionsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpressMailOptionsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpressMailOptionsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpressMailOptionsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpressMailOptionsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailOptionsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.DeliveryOption is not None: + namespaceprefix_ = self.DeliveryOption_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryOption_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryOption), input_name='DeliveryOption')), namespaceprefix_ , eol_)) + if self.WaiverOfSignature is not None: + namespaceprefix_ = self.WaiverOfSignature_nsprefix_ + ':' if (UseCapturedNS_ and self.WaiverOfSignature_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWaiverOfSignature>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.WaiverOfSignature, input_name='WaiverOfSignature'), namespaceprefix_ , eol_)) + if self.eSOFAllowed is not None: + namespaceprefix_ = self.eSOFAllowed_nsprefix_ + ':' if (UseCapturedNS_ and self.eSOFAllowed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%seSOFAllowed>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.eSOFAllowed, input_name='eSOFAllowed'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'DeliveryOption': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryOption') + value_ = self.gds_validate_string(value_, node, 'DeliveryOption') + self.DeliveryOption = value_ + 
self.DeliveryOption_nsprefix_ = child_.prefix + elif nodeName_ == 'WaiverOfSignature': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'WaiverOfSignature') + ival_ = self.gds_validate_boolean(ival_, node, 'WaiverOfSignature') + self.WaiverOfSignature = ival_ + self.WaiverOfSignature_nsprefix_ = child_.prefix + elif nodeName_ == 'eSOFAllowed': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'eSOFAllowed') + ival_ = self.gds_validate_boolean(ival_, node, 'eSOFAllowed') + self.eSOFAllowed = ival_ + self.eSOFAllowed_nsprefix_ = child_.prefix +# end class ExpressMailOptionsType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + 
else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ExtraService_), input_name='ExtraService')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExtraService') + value_ = self.gds_validate_string(value_, node, 'ExtraService') + self.ExtraService.append(value_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentDescription') + value_ = self.gds_validate_string(value_, node, 'ContentDescription') + self.ContentDescription = value_ + self.ContentDescription_nsprefix_ = child_.prefix +# end class ContentType + + +class ShippingContentsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ItemDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ItemDetail is None: + self.ItemDetail = [] + else: + self.ItemDetail = ItemDetail + self.ItemDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShippingContentsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShippingContentsType.subclass: + return ShippingContentsType.subclass(*args_, **kwargs_) + else: + return ShippingContentsType(*args_, **kwargs_) + factory = staticmethod(factory) + def 
get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ItemDetail(self): + return self.ItemDetail + def set_ItemDetail(self, ItemDetail): + self.ItemDetail = ItemDetail + def add_ItemDetail(self, value): + self.ItemDetail.append(value) + def insert_ItemDetail_at(self, index, value): + self.ItemDetail.insert(index, value) + def replace_ItemDetail_at(self, index, value): + self.ItemDetail[index] = value + def has__content(self): + if ( + self.ItemDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingContentsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShippingContentsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingContentsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingContentsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingContentsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingContentsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ItemDetail_ in self.ItemDetail: + namespaceprefix_ = self.ItemDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ItemDetail_nsprefix_) else '' + ItemDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ItemDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ItemDetail': + obj_ = ItemDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ItemDetail.append(obj_) + obj_.original_tagname_ = 'ItemDetail' +# end class ShippingContentsType + + +class ItemDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Description=None, Quantity=None, Value=None, NetPounds=None, NetOunces=None, HSTariffNumber=None, CountryOfOrigin=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + 
self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.Quantity = Quantity + self.Quantity_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.NetPounds = NetPounds + self.NetPounds_nsprefix_ = None + self.NetOunces = NetOunces + self.NetOunces_nsprefix_ = None + self.HSTariffNumber = HSTariffNumber + self.HSTariffNumber_nsprefix_ = None + self.CountryOfOrigin = CountryOfOrigin + self.CountryOfOrigin_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ItemDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ItemDetailType.subclass: + return ItemDetailType.subclass(*args_, **kwargs_) + else: + return ItemDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_Quantity(self): + return self.Quantity + def set_Quantity(self, Quantity): + self.Quantity = Quantity + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_NetPounds(self): + return self.NetPounds + def set_NetPounds(self, NetPounds): + self.NetPounds = NetPounds + def get_NetOunces(self): + return self.NetOunces + def set_NetOunces(self, NetOunces): + self.NetOunces = NetOunces + def get_HSTariffNumber(self): + return self.HSTariffNumber + def set_HSTariffNumber(self, HSTariffNumber): + self.HSTariffNumber = HSTariffNumber + def get_CountryOfOrigin(self): + return self.CountryOfOrigin + def set_CountryOfOrigin(self, CountryOfOrigin): + self.CountryOfOrigin = CountryOfOrigin + def has__content(self): + if ( + self.Description is not None or + self.Quantity is not None or + self.Value is not None or + self.NetPounds is not None or + self.NetOunces is not None or + self.HSTariffNumber is not None or + self.CountryOfOrigin is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ItemDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ItemDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ItemDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ItemDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ItemDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ItemDetailType', 
fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s</%sDescription>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.Quantity is not None: + namespaceprefix_ = self.Quantity_nsprefix_ + ':' if (UseCapturedNS_ and self.Quantity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sQuantity>%s</%sQuantity>%s' % (namespaceprefix_ , self.gds_format_integer(self.Quantity, input_name='Quantity'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s</%sValue>%s' % (namespaceprefix_ , self.gds_format_float(self.Value, input_name='Value'), namespaceprefix_ , eol_)) + if self.NetPounds is not None: + namespaceprefix_ = self.NetPounds_nsprefix_ + ':' if (UseCapturedNS_ and self.NetPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetPounds>%s</%sNetPounds>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NetPounds), input_name='NetPounds')), namespaceprefix_ , eol_)) + if self.NetOunces is not None: + namespaceprefix_ = self.NetOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.NetOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNetOunces>%s</%sNetOunces>%s' % (namespaceprefix_ , self.gds_format_float(self.NetOunces, input_name='NetOunces'), namespaceprefix_ , eol_)) + if self.HSTariffNumber is not None: + namespaceprefix_ = self.HSTariffNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.HSTariffNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHSTariffNumber>%s</%sHSTariffNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HSTariffNumber), input_name='HSTariffNumber')), namespaceprefix_ , eol_)) + if self.CountryOfOrigin is not None: + namespaceprefix_ = self.CountryOfOrigin_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryOfOrigin_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountryOfOrigin>%s</%sCountryOfOrigin>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryOfOrigin), input_name='CountryOfOrigin')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'Quantity' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_,
node, 'Quantity') + ival_ = self.gds_validate_integer(ival_, node, 'Quantity') + self.Quantity = ival_ + self.Quantity_nsprefix_ = child_.prefix + elif nodeName_ == 'Value' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Value') + fval_ = self.gds_validate_float(fval_, node, 'Value') + self.Value = fval_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'NetPounds': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NetPounds') + value_ = self.gds_validate_string(value_, node, 'NetPounds') + self.NetPounds = value_ + self.NetPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'NetOunces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'NetOunces') + fval_ = self.gds_validate_float(fval_, node, 'NetOunces') + self.NetOunces = fval_ + self.NetOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'HSTariffNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HSTariffNumber') + value_ = self.gds_validate_string(value_, node, 'HSTariffNumber') + self.HSTariffNumber = value_ + self.HSTariffNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'CountryOfOrigin': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CountryOfOrigin') + value_ = self.gds_validate_string(value_, node, 'CountryOfOrigin') + self.CountryOfOrigin = value_ + self.CountryOfOrigin_nsprefix_ = child_.prefix +# end class ItemDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSRequest' + rootClass = eVSRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_request import *\n\n') + sys.stdout.write('import evs_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "ExpressMailOptionsType", + "ExtraServicesType", + "ImageParametersType", + "ItemDetailType", + "LabelSequenceType", + "ShippingContentsType", + "eVSRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evs_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_response.py new file mode 100644 index 0000000000..7327cb76be --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evs_response.py @@ -0,0 +1,1979 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:10 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evs_response.py') +# +# Command line arguments: +# ./schemas/eVSResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evs_response.py" ./schemas/eVSResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSResponse(GeneratedsSuper): + """LabelImage -- over 115000 suppressed + + """ + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, BarcodeNumber=None, LabelImage=None, ToName=None, ToFirm=None, ToAddress1=None, ToAddress2=None, ToCity=None, ToState=None, ToZip5=None, ToZip4=None, Postnet=None, RDC=None, Postage=None, ExtraServices=None, Zone=None, CarrierRoute=None, PermitHolderName=None, InductionType=None, LogMessage=None, Commitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.LabelImage = LabelImage + self.LabelImage_nsprefix_ = None + self.ToName = ToName + self.ToName_nsprefix_ = None + self.ToFirm = ToFirm + self.ToFirm_nsprefix_ = None + self.ToAddress1 = ToAddress1 + self.ToAddress1_nsprefix_ = None + self.ToAddress2 = ToAddress2 + self.ToAddress2_nsprefix_ = None + self.ToCity = ToCity + self.ToCity_nsprefix_ = None + self.ToState = ToState + self.ToState_nsprefix_ = None + self.ToZip5 = ToZip5 + self.ToZip5_nsprefix_ = None + self.ToZip4 = ToZip4 + self.ToZip4_nsprefix_ = None + self.Postnet = Postnet + self.Postnet_nsprefix_ = None + self.RDC = RDC + self.RDC_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.Zone = Zone + self.Zone_nsprefix_ = None + self.CarrierRoute = CarrierRoute + self.CarrierRoute_nsprefix_ = None + self.PermitHolderName = PermitHolderName + self.PermitHolderName_nsprefix_ = None + self.InductionType = InductionType + self.InductionType_nsprefix_ = None + self.LogMessage = LogMessage + self.LogMessage_nsprefix_ = None + self.Commitment = Commitment + self.Commitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSResponse.subclass: + return eVSResponse.subclass(*args_, **kwargs_) + else: + return eVSResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_LabelImage(self): + return 
self.LabelImage + def set_LabelImage(self, LabelImage): + self.LabelImage = LabelImage + def get_ToName(self): + return self.ToName + def set_ToName(self, ToName): + self.ToName = ToName + def get_ToFirm(self): + return self.ToFirm + def set_ToFirm(self, ToFirm): + self.ToFirm = ToFirm + def get_ToAddress1(self): + return self.ToAddress1 + def set_ToAddress1(self, ToAddress1): + self.ToAddress1 = ToAddress1 + def get_ToAddress2(self): + return self.ToAddress2 + def set_ToAddress2(self, ToAddress2): + self.ToAddress2 = ToAddress2 + def get_ToCity(self): + return self.ToCity + def set_ToCity(self, ToCity): + self.ToCity = ToCity + def get_ToState(self): + return self.ToState + def set_ToState(self, ToState): + self.ToState = ToState + def get_ToZip5(self): + return self.ToZip5 + def set_ToZip5(self, ToZip5): + self.ToZip5 = ToZip5 + def get_ToZip4(self): + return self.ToZip4 + def set_ToZip4(self, ToZip4): + self.ToZip4 = ToZip4 + def get_Postnet(self): + return self.Postnet + def set_Postnet(self, Postnet): + self.Postnet = Postnet + def get_RDC(self): + return self.RDC + def set_RDC(self, RDC): + self.RDC = RDC + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_Zone(self): + return self.Zone + def set_Zone(self, Zone): + self.Zone = Zone + def get_CarrierRoute(self): + return self.CarrierRoute + def set_CarrierRoute(self, CarrierRoute): + self.CarrierRoute = CarrierRoute + def get_PermitHolderName(self): + return self.PermitHolderName + def set_PermitHolderName(self, PermitHolderName): + self.PermitHolderName = PermitHolderName + def get_InductionType(self): + return self.InductionType + def set_InductionType(self, InductionType): + self.InductionType = InductionType + def get_LogMessage(self): + return self.LogMessage + def set_LogMessage(self, LogMessage): + self.LogMessage = LogMessage + def get_Commitment(self): + return self.Commitment + def set_Commitment(self, Commitment): + self.Commitment = Commitment + def has__content(self): + if ( + self.BarcodeNumber is not None or + self.LabelImage is not None or + self.ToName is not None or + self.ToFirm is not None or + self.ToAddress1 is not None or + self.ToAddress2 is not None or + self.ToCity is not None or + self.ToState is not None or + self.ToZip5 is not None or + self.ToZip4 is not None or + self.Postnet is not None or + self.RDC is not None or + self.Postage is not None or + self.ExtraServices is not None or + self.Zone is not None or + self.CarrierRoute is not None or + self.PermitHolderName is not None or + self.InductionType is not None or + self.LogMessage is not None or + self.Commitment is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, 
level, already_processed, namespaceprefix_, name_='eVSResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + if self.LabelImage is not None: + namespaceprefix_ = self.LabelImage_nsprefix_ + ':' if (UseCapturedNS_ and self.LabelImage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLabelImage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LabelImage), input_name='LabelImage')), namespaceprefix_ , eol_)) + if self.ToName is not None: + namespaceprefix_ = self.ToName_nsprefix_ + ':' if (UseCapturedNS_ and self.ToName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToName), input_name='ToName')), namespaceprefix_ , eol_)) + if self.ToFirm is not None: + namespaceprefix_ = self.ToFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.ToFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToFirm), input_name='ToFirm')), namespaceprefix_ , eol_)) + if self.ToAddress1 is not None: + namespaceprefix_ = self.ToAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress1), input_name='ToAddress1')), namespaceprefix_ , eol_)) + if self.ToAddress2 is not None: + namespaceprefix_ = self.ToAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.ToAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToAddress2), input_name='ToAddress2')), namespaceprefix_ , eol_)) + if self.ToCity is not None: + namespaceprefix_ = self.ToCity_nsprefix_ + ':' if (UseCapturedNS_ and self.ToCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToCity), input_name='ToCity')), namespaceprefix_ , eol_)) + if self.ToState is not None: + namespaceprefix_ = self.ToState_nsprefix_ + ':' if (UseCapturedNS_ and self.ToState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ToState), input_name='ToState')), 
namespaceprefix_ , eol_)) + if self.ToZip5 is not None: + namespaceprefix_ = self.ToZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ToZip5, input_name='ToZip5'), namespaceprefix_ , eol_)) + if self.ToZip4 is not None: + namespaceprefix_ = self.ToZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.ToZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sToZip4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ToZip4, input_name='ToZip4'), namespaceprefix_ , eol_)) + if self.Postnet is not None: + namespaceprefix_ = self.Postnet_nsprefix_ + ':' if (UseCapturedNS_ and self.Postnet_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostnet>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Postnet, input_name='Postnet'), namespaceprefix_ , eol_)) + if self.RDC is not None: + namespaceprefix_ = self.RDC_nsprefix_ + ':' if (UseCapturedNS_ and self.RDC_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRDC>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RDC), input_name='RDC')), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.Zone is not None: + namespaceprefix_ = self.Zone_nsprefix_ + ':' if (UseCapturedNS_ and self.Zone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zone), input_name='Zone')), namespaceprefix_ , eol_)) + if self.CarrierRoute is not None: + namespaceprefix_ = self.CarrierRoute_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierRoute_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCarrierRoute>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierRoute), input_name='CarrierRoute')), namespaceprefix_ , eol_)) + if self.PermitHolderName is not None: + namespaceprefix_ = self.PermitHolderName_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitHolderName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitHolderName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitHolderName), input_name='PermitHolderName')), namespaceprefix_ , eol_)) + if self.InductionType is not None: + namespaceprefix_ = self.InductionType_nsprefix_ + ':' if (UseCapturedNS_ and self.InductionType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInductionType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InductionType), input_name='InductionType')), namespaceprefix_ , eol_)) + if self.LogMessage is not None: + namespaceprefix_ = self.LogMessage_nsprefix_ + ':' if (UseCapturedNS_ and self.LogMessage_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sLogMessage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LogMessage), input_name='LogMessage')), namespaceprefix_ , eol_)) + if self.Commitment is not None: + namespaceprefix_ = self.Commitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Commitment_nsprefix_) else '' + self.Commitment.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Commitment', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'LabelImage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LabelImage') + value_ = self.gds_validate_string(value_, node, 'LabelImage') + self.LabelImage = value_ + self.LabelImage_nsprefix_ = child_.prefix + elif nodeName_ == 'ToName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToName') + value_ = self.gds_validate_string(value_, node, 'ToName') + self.ToName = value_ + self.ToName_nsprefix_ = child_.prefix + elif nodeName_ == 'ToFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToFirm') + value_ = self.gds_validate_string(value_, node, 'ToFirm') + self.ToFirm = value_ + self.ToFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress1') + value_ = self.gds_validate_string(value_, node, 'ToAddress1') + self.ToAddress1 = value_ + self.ToAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'ToAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToAddress2') + value_ = self.gds_validate_string(value_, node, 'ToAddress2') + self.ToAddress2 = value_ + self.ToAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'ToCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToCity') + value_ = self.gds_validate_string(value_, node, 'ToCity') + self.ToCity = value_ + self.ToCity_nsprefix_ = child_.prefix + elif nodeName_ == 'ToState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ToState') + value_ = self.gds_validate_string(value_, node, 'ToState') + self.ToState = value_ + self.ToState_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ToZip5') + ival_ = self.gds_validate_integer(ival_, node, 'ToZip5') + self.ToZip5 = ival_ + self.ToZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'ToZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ToZip4') + ival_ = self.gds_validate_integer(ival_, node, 'ToZip4') + self.ToZip4 = ival_ + self.ToZip4_nsprefix_ = child_.prefix + elif 
nodeName_ == 'Postnet' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Postnet') + ival_ = self.gds_validate_integer(ival_, node, 'Postnet') + self.Postnet = ival_ + self.Postnet_nsprefix_ = child_.prefix + elif nodeName_ == 'RDC': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RDC') + value_ = self.gds_validate_string(value_, node, 'RDC') + self.RDC = value_ + self.RDC_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'Zone': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zone') + value_ = self.gds_validate_string(value_, node, 'Zone') + self.Zone = value_ + self.Zone_nsprefix_ = child_.prefix + elif nodeName_ == 'CarrierRoute': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CarrierRoute') + value_ = self.gds_validate_string(value_, node, 'CarrierRoute') + self.CarrierRoute = value_ + self.CarrierRoute_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitHolderName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitHolderName') + value_ = self.gds_validate_string(value_, node, 'PermitHolderName') + self.PermitHolderName = value_ + self.PermitHolderName_nsprefix_ = child_.prefix + elif nodeName_ == 'InductionType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InductionType') + value_ = self.gds_validate_string(value_, node, 'InductionType') + self.InductionType = value_ + self.InductionType_nsprefix_ = child_.prefix + elif nodeName_ == 'LogMessage': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LogMessage') + value_ = self.gds_validate_string(value_, node, 'LogMessage') + self.LogMessage = value_ + self.LogMessage_nsprefix_ = child_.prefix + elif nodeName_ == 'Commitment': + obj_ = CommitmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Commitment = obj_ + obj_.original_tagname_ = 'Commitment' +# end class eVSResponse + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def 
set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + obj_ = ExtraServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraService.append(obj_) + obj_.original_tagname_ = 'ExtraService' +# end class ExtraServicesType + + +class ExtraServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Price=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Price = Price + 
self.Price_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServiceType.subclass: + return ExtraServiceType.subclass(*args_, **kwargs_) + else: + return ExtraServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Price is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + 
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ServiceID') + ival_ = self.gds_validate_integer(ival_, node, 'ServiceID') + self.ServiceID = ival_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +class CommitmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CommitmentName=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommitmentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CommitmentType.subclass: + return CommitmentType.subclass(*args_, **kwargs_) + else: + return CommitmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName = CommitmentName + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.CommitmentName is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommitmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CommitmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or 
'', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommitmentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommitmentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommitmentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CommitmentName is not None: + namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_)) + if self.ScheduledDeliveryDate is not None: + namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sScheduledDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'CommitmentName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentName') + value_ = self.gds_validate_string(value_, node, 'CommitmentName') + self.CommitmentName = value_ + self.CommitmentName_nsprefix_ = child_.prefix + elif nodeName_ == 'ScheduledDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate') + self.ScheduledDeliveryDate = value_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix +# end class CommitmentType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSResponse' + rootClass = eVSResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evs_response import *\n\n') + sys.stdout.write('import evs_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CommitmentType", + "ExtraServiceType", + "ExtraServicesType", + "eVSResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_request.py new file mode 100644 index 0000000000..1d6fb960fe --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_request.py @@ -0,0 +1,1335 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:12 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evsi_cancel_request.py') +# +# Command line arguments: +# ./schemas/eVSICancelRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evsi_cancel_request.py" ./schemas/eVSICancelRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
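The CurrentSubclassModule_ hook above, together with getSubclassFromModule_ and the per-class factory() methods generated further down in this module, is how generateDS lets a hand-written companion module override the generated classes without editing this file. A minimal sketch of that pattern, assuming a hypothetical companion module named evsi_cancel_request_subs (module, import path, and helper names are illustrative, not part of this patch):

    # evsi_cancel_request_subs.py -- hypothetical companion module
    import evsi_cancel_request as supermod

    class eVSICancelRequestSub(supermod.eVSICancelRequest):
        # convenience helper layered on the generated accessors
        def label(self):
            return 'cancel %s' % (self.get_BarcodeNumber() or '')

    # elsewhere, e.g. at application startup: point the generated factories
    # at the companion module; factory() then resolves 'eVSICancelRequestSub'
    # through getSubclassFromModule_ and returns the subclass instead.
    import evsi_cancel_request_subs as subs
    supermod.CurrentSubclassModule_ = subs
    request = supermod.eVSICancelRequest.factory(BarcodeNumber='9270190164917300000000')
    assert isinstance(request, subs.eVSICancelRequestSub)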
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSICancelRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, BarcodeNumber=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSICancelRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSICancelRequest.subclass: + return eVSICancelRequest.subclass(*args_, **kwargs_) + else: + return eVSICancelRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.BarcodeNumber is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSICancelRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSICancelRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSICancelRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSICancelRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSICancelRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.BarcodeNumber is not None: + namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sBarcodeNumber>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'BarcodeNumber': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'BarcodeNumber') + value_ = self.gds_validate_string(value_, node, 'BarcodeNumber') + self.BarcodeNumber = value_ + self.BarcodeNumber_nsprefix_ = child_.prefix +# end class eVSICancelRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelRequest' + rootClass = eVSICancelRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evsi_cancel_request import *\n\n') + sys.stdout.write('import evsi_cancel_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSICancelRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_response.py new file mode 100644 index 0000000000..06718e5403 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/evsi_cancel_response.py @@ -0,0 +1,1345 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:12 2024 by generateDS.py version 2.43.3. 
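The generated evsi_cancel_request module above is normally driven in one of two ways: build an eVSICancelRequest directly and serialize it with export(), or hand raw XML to parseString() and read the object tree back. A rough usage sketch, assuming the module is importable as karrio.schemas.usps_international.evsi_cancel_request (the import path, credentials, and barcode are placeholders):

    import sys
    from karrio.schemas.usps_international import evsi_cancel_request as evsi

    # build the element programmatically, then write it out as XML
    request = evsi.eVSICancelRequest(
        USERID='XXXXXXXXXXXX',                    # placeholder user id
        PASSWORD='XXXXXXXXXXXX',                  # placeholder password
        BarcodeNumber='9270190164917300000000',   # placeholder label barcode
    )
    request.export(sys.stdout, 0)

    # or rebuild the object tree from an XML string;
    # silence=True skips the re-export to stdout that parseString does by default
    xml = (
        '<eVSICancelRequest USERID="x" PASSWORD="x">'
        '<BarcodeNumber>9270190164917300000000</BarcodeNumber>'
        '</eVSICancelRequest>'
    )
    parsed = evsi.parseString(xml, silence=True)
    print(parsed.get_BarcodeNumber())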
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/evsi_cancel_response.py') +# +# Command line arguments: +# ./schemas/eVSICancelResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/evsi_cancel_response.py" ./schemas/eVSICancelResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
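Further down in this module, the generated eVSICancelResponse class exposes BarcodeNumber, Status, and Reason accessors, and the parse helpers pair it with a GdsCollector_ that accumulates validation warnings. A hedged sketch of consuming a cancel response directly, mirroring what parseString() does internally (the import path and sample XML values are assumptions, not taken from this patch):

    import karrio.schemas.usps_international.evsi_cancel_response as schema

    xml = (
        '<eVSICancelResponse>'
        '<BarcodeNumber>9270190164917300000000</BarcodeNumber>'
        '<Status>Not Cancelled</Status>'
        '<Reason>Order already processed</Reason>'
        '</eVSICancelResponse>'
    )
    collector = schema.GdsCollector_()
    node = schema.parsexmlstring_(xml)
    response = schema.eVSICancelResponse.factory()
    response.build(node, gds_collector_=collector)

    print('status:', response.get_Status())
    if response.get_Reason():
        print('reason:', response.get_Reason())
    for warning in collector.get_messages():   # cardinality/validation messages, if any
        print('warning:', warning)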
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class eVSICancelResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, BarcodeNumber=None, Status=None, Reason=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.BarcodeNumber = BarcodeNumber + self.BarcodeNumber_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + self.Reason = Reason + self.Reason_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, eVSICancelResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if eVSICancelResponse.subclass: + return eVSICancelResponse.subclass(*args_, **kwargs_) + else: + return eVSICancelResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_BarcodeNumber(self): + return self.BarcodeNumber + def set_BarcodeNumber(self, BarcodeNumber): + self.BarcodeNumber = BarcodeNumber + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def get_Reason(self): + return self.Reason + def set_Reason(self, Reason): + self.Reason = Reason + def has__content(self): + if ( + self.BarcodeNumber is not None or + self.Status is not None or + self.Reason is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('eVSICancelResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'eVSICancelResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='eVSICancelResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='eVSICancelResponse', pretty_print=pretty_print) + showIndent(outfile, level, 
pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='eVSICancelResponse'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='eVSICancelResponse', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.BarcodeNumber is not None:
+            namespaceprefix_ = self.BarcodeNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.BarcodeNumber_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sBarcodeNumber>%s</%sBarcodeNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.BarcodeNumber), input_name='BarcodeNumber')), namespaceprefix_ , eol_))
+        if self.Status is not None:
+            namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sStatus>%s</%sStatus>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_))
+        if self.Reason is not None:
+            namespaceprefix_ = self.Reason_nsprefix_ + ':' if (UseCapturedNS_ and self.Reason_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sReason>%s</%sReason>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Reason), input_name='Reason')), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'BarcodeNumber':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'BarcodeNumber')
+            value_ = self.gds_validate_string(value_, node, 'BarcodeNumber')
+            self.BarcodeNumber = value_
+            self.BarcodeNumber_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Status':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Status')
+            value_ = self.gds_validate_string(value_, node, 'Status')
+            self.Status = value_
+            self.Status_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Reason':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'Reason')
+            value_ = self.gds_validate_string(value_, node, 'Reason')
+            self.Reason = value_
+            self.Reason_nsprefix_ = child_.prefix
+# end class eVSICancelResponse
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'eVSICancelResponse' + rootClass = eVSICancelResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from evsi_cancel_response import *\n\n') + sys.stdout.write('import evsi_cancel_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "eVSICancelResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_request.py new file mode 100644 index 0000000000..d5b7646d2a --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_request.py @@ -0,0 +1,1437 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:04 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/express_mail_commitment_request.py') +# +# Command line arguments: +# ./schemas/ExpressMailCommitmentRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/express_mail_commitment_request.py" ./schemas/ExpressMailCommitmentRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ExpressMailCommitmentRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, Date=None, DropOffTime=None, ReturnDates=None, PMGuarantee=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.DropOffTime = DropOffTime + self.DropOffTime_nsprefix_ = None + self.ReturnDates = ReturnDates + self.ReturnDates_nsprefix_ = None + self.PMGuarantee = PMGuarantee + self.PMGuarantee_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExpressMailCommitmentRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExpressMailCommitmentRequest.subclass: + return ExpressMailCommitmentRequest.subclass(*args_, **kwargs_) + else: + return ExpressMailCommitmentRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_DropOffTime(self): + return self.DropOffTime + def set_DropOffTime(self, DropOffTime): + self.DropOffTime = DropOffTime + def get_ReturnDates(self): + return self.ReturnDates + def set_ReturnDates(self, ReturnDates): + self.ReturnDates = ReturnDates + def get_PMGuarantee(self): + return self.PMGuarantee + def set_PMGuarantee(self, PMGuarantee): + self.PMGuarantee = PMGuarantee + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + 
self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Date is not None or + self.DropOffTime is not None or + self.ReturnDates is not None or + self.PMGuarantee is not None or + self.ClientType is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpressMailCommitmentRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpressMailCommitmentRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpressMailCommitmentRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpressMailCommitmentRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpressMailCommitmentRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.DropOffTime is not None: + namespaceprefix_ = self.DropOffTime_nsprefix_ + ':' if (UseCapturedNS_ and 
self.DropOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDropOffTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DropOffTime), input_name='DropOffTime')), namespaceprefix_ , eol_)) + if self.ReturnDates is not None: + namespaceprefix_ = self.ReturnDates_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnDates_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnDates>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReturnDates), input_name='ReturnDates')), namespaceprefix_ , eol_)) + if self.PMGuarantee is not None: + namespaceprefix_ = self.PMGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.PMGuarantee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPMGuarantee>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PMGuarantee), input_name='PMGuarantee')), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'DropOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DropOffTime') + value_ = self.gds_validate_string(value_, node, 'DropOffTime') + self.DropOffTime = value_ + self.DropOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnDates': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReturnDates') + value_ = self.gds_validate_string(value_, node, 'ReturnDates') + self.ReturnDates = value_ + self.ReturnDates_nsprefix_ = child_.prefix 
+ elif nodeName_ == 'PMGuarantee': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PMGuarantee') + value_ = self.gds_validate_string(value_, node, 'PMGuarantee') + self.PMGuarantee = value_ + self.PMGuarantee_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class ExpressMailCommitmentRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentRequest' + rootClass = ExpressMailCommitmentRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from express_mail_commitment_request import *\n\n') + sys.stdout.write('import express_mail_commitment_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ExpressMailCommitmentRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_response.py new file mode 100644 index 0000000000..9ee21bed83 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/express_mail_commitment_response.py @@ -0,0 +1,1846 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:04 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/express_mail_commitment_response.py') +# +# Command line arguments: +# ./schemas/ExpressMailCommitmentResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/express_mail_commitment_response.py" ./schemas/ExpressMailCommitmentResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
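+#
+# For example, the "generatedscollector" hook described above can be
+# satisfied by a module as small as the sketch below. This is an
+# illustrative example only, not part of the generated output: the file
+# name is the one probed by the import above, and the method set simply
+# mirrors the default GdsCollector_ fallback defined in this module.
+#
+#   # File: generatedscollector.py
+#   import logging
+#
+#   class GdsCollector(object):
+#       def __init__(self, messages=None):
+#           self.messages = list(messages or [])
+#       def add_message(self, msg):
+#           # Route parse/validation warnings to logging as well as
+#           # keeping them for get_messages()/write_messages().
+#           logging.getLogger(__name__).warning(msg)
+#           self.messages.append(msg)
+#       def get_messages(self):
+#           return self.messages
+#       def clear_messages(self):
+#           self.messages = []
+#       def print_messages(self):
+#           for msg in self.messages:
+#               print("Warning: {}".format(msg))
+#       def write_messages(self, outstream):
+#           for msg in self.messages:
+#               outstream.write("Warning: {}\n".format(msg))
+#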
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ExpressMailCommitmentResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZIP=None, OriginCity=None, OriginState=None, DestinationZIP=None, DestinationCity=None, DestinationState=None, Date=None, Time=None, EffectiveAcceptanceDate=None, Commitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZIP = OriginZIP + self.OriginZIP_nsprefix_ = None + self.OriginCity = OriginCity + self.OriginCity_nsprefix_ = None + self.OriginState = OriginState + self.OriginState_nsprefix_ = None + self.DestinationZIP = DestinationZIP + self.DestinationZIP_nsprefix_ = None + self.DestinationCity = DestinationCity + self.DestinationCity_nsprefix_ = None + self.DestinationState = DestinationState + self.DestinationState_nsprefix_ = None + self.Date = Date + self.Date_nsprefix_ = None + self.Time = Time + self.Time_nsprefix_ = None + if isinstance(EffectiveAcceptanceDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EffectiveAcceptanceDate, '%Y-%m-%d').date() + else: + initvalue_ = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate = initvalue_ + self.EffectiveAcceptanceDate_nsprefix_ = None + if Commitment is None: + self.Commitment = [] + else: + self.Commitment = Commitment + self.Commitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExpressMailCommitmentResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExpressMailCommitmentResponse.subclass: + return ExpressMailCommitmentResponse.subclass(*args_, **kwargs_) + else: + return ExpressMailCommitmentResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZIP(self): + return self.OriginZIP + def set_OriginZIP(self, OriginZIP): + self.OriginZIP = OriginZIP + def get_OriginCity(self): + return self.OriginCity + def set_OriginCity(self, OriginCity): + self.OriginCity = OriginCity + def get_OriginState(self): + return self.OriginState + def set_OriginState(self, OriginState): + self.OriginState = OriginState + def get_DestinationZIP(self): + return self.DestinationZIP + def set_DestinationZIP(self, DestinationZIP): + self.DestinationZIP = DestinationZIP + def get_DestinationCity(self): + return 
self.DestinationCity + def set_DestinationCity(self, DestinationCity): + self.DestinationCity = DestinationCity + def get_DestinationState(self): + return self.DestinationState + def set_DestinationState(self, DestinationState): + self.DestinationState = DestinationState + def get_Date(self): + return self.Date + def set_Date(self, Date): + self.Date = Date + def get_Time(self): + return self.Time + def set_Time(self, Time): + self.Time = Time + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_Commitment(self): + return self.Commitment + def set_Commitment(self, Commitment): + self.Commitment = Commitment + def add_Commitment(self, value): + self.Commitment.append(value) + def insert_Commitment_at(self, index, value): + self.Commitment.insert(index, value) + def replace_Commitment_at(self, index, value): + self.Commitment[index] = value + def has__content(self): + if ( + self.OriginZIP is not None or + self.OriginCity is not None or + self.OriginState is not None or + self.DestinationZIP is not None or + self.DestinationCity is not None or + self.DestinationState is not None or + self.Date is not None or + self.Time is not None or + self.EffectiveAcceptanceDate is not None or + self.Commitment + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpressMailCommitmentResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpressMailCommitmentResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpressMailCommitmentResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpressMailCommitmentResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpressMailCommitmentResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpressMailCommitmentResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZIP is not None: + namespaceprefix_ = self.OriginZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZIP, input_name='OriginZIP'), namespaceprefix_ , eol_)) + if self.OriginCity is not None: + namespaceprefix_ = self.OriginCity_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCity>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.OriginCity), input_name='OriginCity')), namespaceprefix_ , eol_)) + if self.OriginState is not None: + namespaceprefix_ = self.OriginState_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginState), input_name='OriginState')), namespaceprefix_ , eol_)) + if self.DestinationZIP is not None: + namespaceprefix_ = self.DestinationZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZIP, input_name='DestinationZIP'), namespaceprefix_ , eol_)) + if self.DestinationCity is not None: + namespaceprefix_ = self.DestinationCity_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationCity), input_name='DestinationCity')), namespaceprefix_ , eol_)) + if self.DestinationState is not None: + namespaceprefix_ = self.DestinationState_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationState), input_name='DestinationState')), namespaceprefix_ , eol_)) + if self.Date is not None: + namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Date), input_name='Date')), namespaceprefix_ , eol_)) + if self.Time is not None: + namespaceprefix_ = self.Time_nsprefix_ + ':' if (UseCapturedNS_ and self.Time_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Time), input_name='Time')), namespaceprefix_ , eol_)) + if self.EffectiveAcceptanceDate is not None: + namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEffectiveAcceptanceDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.EffectiveAcceptanceDate, input_name='EffectiveAcceptanceDate'), namespaceprefix_ , eol_)) + for Commitment_ in self.Commitment: + namespaceprefix_ = self.Commitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Commitment_nsprefix_) else '' + Commitment_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Commitment', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, 
nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZIP') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZIP') + self.OriginZIP = ival_ + self.OriginZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCity') + value_ = self.gds_validate_string(value_, node, 'OriginCity') + self.OriginCity = value_ + self.OriginCity_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginState') + value_ = self.gds_validate_string(value_, node, 'OriginState') + self.OriginState = value_ + self.OriginState_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZIP') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZIP') + self.DestinationZIP = ival_ + self.DestinationZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationCity') + value_ = self.gds_validate_string(value_, node, 'DestinationCity') + self.DestinationCity = value_ + self.DestinationCity_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationState') + value_ = self.gds_validate_string(value_, node, 'DestinationState') + self.DestinationState = value_ + self.DestinationState_nsprefix_ = child_.prefix + elif nodeName_ == 'Date': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Date') + value_ = self.gds_validate_string(value_, node, 'Date') + self.Date = value_ + self.Date_nsprefix_ = child_.prefix + elif nodeName_ == 'Time': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Time') + value_ = self.gds_validate_string(value_, node, 'Time') + self.Time = value_ + self.Time_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.EffectiveAcceptanceDate = dval_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'Commitment': + obj_ = CommitmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Commitment.append(obj_) + obj_.original_tagname_ = 'Commitment' +# end class ExpressMailCommitmentResponse + + +class CommitmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CommitmentName=None, CommitmentTime=None, CommitmentSequence=None, Location=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.CommitmentTime = CommitmentTime + self.CommitmentTime_nsprefix_ = None + self.CommitmentSequence = CommitmentSequence + self.CommitmentSequence_nsprefix_ = None + if Location is None: + self.Location = [] + else: + self.Location = Location + self.Location_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommitmentType) + if subclass is not None: + 
return subclass(*args_, **kwargs_) + if CommitmentType.subclass: + return CommitmentType.subclass(*args_, **kwargs_) + else: + return CommitmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName = CommitmentName + def get_CommitmentTime(self): + return self.CommitmentTime + def set_CommitmentTime(self, CommitmentTime): + self.CommitmentTime = CommitmentTime + def get_CommitmentSequence(self): + return self.CommitmentSequence + def set_CommitmentSequence(self, CommitmentSequence): + self.CommitmentSequence = CommitmentSequence + def get_Location(self): + return self.Location + def set_Location(self, Location): + self.Location = Location + def add_Location(self, value): + self.Location.append(value) + def insert_Location_at(self, index, value): + self.Location.insert(index, value) + def replace_Location_at(self, index, value): + self.Location[index] = value + def has__content(self): + if ( + self.CommitmentName is not None or + self.CommitmentTime is not None or + self.CommitmentSequence is not None or + self.Location + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommitmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CommitmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommitmentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommitmentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommitmentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CommitmentName is not None: + namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_)) + if self.CommitmentTime is not None: + namespaceprefix_ = self.CommitmentTime_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentTime), input_name='CommitmentTime')), namespaceprefix_ , eol_)) + if 
self.CommitmentSequence is not None: + namespaceprefix_ = self.CommitmentSequence_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentSequence_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentSequence>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentSequence), input_name='CommitmentSequence')), namespaceprefix_ , eol_)) + for Location_ in self.Location: + namespaceprefix_ = self.Location_nsprefix_ + ':' if (UseCapturedNS_ and self.Location_nsprefix_) else '' + Location_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Location', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'CommitmentName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentName') + value_ = self.gds_validate_string(value_, node, 'CommitmentName') + self.CommitmentName = value_ + self.CommitmentName_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentTime') + value_ = self.gds_validate_string(value_, node, 'CommitmentTime') + self.CommitmentTime = value_ + self.CommitmentTime_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentSequence': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentSequence') + value_ = self.gds_validate_string(value_, node, 'CommitmentSequence') + self.CommitmentSequence = value_ + self.CommitmentSequence_nsprefix_ = child_.prefix + elif nodeName_ == 'Location': + obj_ = LocationType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Location.append(obj_) + obj_.original_tagname_ = 'Location' +# end class CommitmentType + + +class LocationType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ScheduledDeliveryDate=None, CutOff=None, Facility=None, Street=None, City=None, State=None, Zip=None, IsGuaranteed=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(ScheduledDeliveryDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(ScheduledDeliveryDate, '%Y-%m-%d').date() + else: + initvalue_ = ScheduledDeliveryDate + self.ScheduledDeliveryDate = initvalue_ + self.ScheduledDeliveryDate_nsprefix_ = None + self.CutOff = CutOff + self.CutOff_nsprefix_ = None + self.Facility = Facility + self.Facility_nsprefix_ = None + self.Street = Street + self.Street_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Zip = Zip + self.Zip_nsprefix_ = None + self.IsGuaranteed = IsGuaranteed + self.IsGuaranteed_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + 
subclass = getSubclassFromModule_( + CurrentSubclassModule_, LocationType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LocationType.subclass: + return LocationType.subclass(*args_, **kwargs_) + else: + return LocationType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def get_CutOff(self): + return self.CutOff + def set_CutOff(self, CutOff): + self.CutOff = CutOff + def get_Facility(self): + return self.Facility + def set_Facility(self, Facility): + self.Facility = Facility + def get_Street(self): + return self.Street + def set_Street(self, Street): + self.Street = Street + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Zip(self): + return self.Zip + def set_Zip(self, Zip): + self.Zip = Zip + def get_IsGuaranteed(self): + return self.IsGuaranteed + def set_IsGuaranteed(self, IsGuaranteed): + self.IsGuaranteed = IsGuaranteed + def has__content(self): + if ( + self.ScheduledDeliveryDate is not None or + self.CutOff is not None or + self.Facility is not None or + self.Street is not None or + self.City is not None or + self.State is not None or + self.Zip is not None or + self.IsGuaranteed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'LocationType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ScheduledDeliveryDate is not None: + namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sScheduledDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.ScheduledDeliveryDate, input_name='ScheduledDeliveryDate'), namespaceprefix_ , eol_)) + if self.CutOff is not None: + namespaceprefix_ = self.CutOff_nsprefix_ + ':' if (UseCapturedNS_ 
and self.CutOff_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCutOff>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CutOff), input_name='CutOff')), namespaceprefix_ , eol_)) + if self.Facility is not None: + namespaceprefix_ = self.Facility_nsprefix_ + ':' if (UseCapturedNS_ and self.Facility_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacility>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Facility), input_name='Facility')), namespaceprefix_ , eol_)) + if self.Street is not None: + namespaceprefix_ = self.Street_nsprefix_ + ':' if (UseCapturedNS_ and self.Street_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStreet>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Street), input_name='Street')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Zip is not None: + namespaceprefix_ = self.Zip_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip, input_name='Zip'), namespaceprefix_ , eol_)) + if self.IsGuaranteed is not None: + namespaceprefix_ = self.IsGuaranteed_nsprefix_ + ':' if (UseCapturedNS_ and self.IsGuaranteed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sIsGuaranteed>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.IsGuaranteed, input_name='IsGuaranteed'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ScheduledDeliveryDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.ScheduledDeliveryDate = dval_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'CutOff': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CutOff') + value_ = self.gds_validate_string(value_, node, 'CutOff') + self.CutOff = value_ + self.CutOff_nsprefix_ = child_.prefix + elif nodeName_ == 'Facility': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Facility') + value_ = self.gds_validate_string(value_, node, 'Facility') + self.Facility = value_ + self.Facility_nsprefix_ = child_.prefix + elif nodeName_ == 
'Street': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Street') + value_ = self.gds_validate_string(value_, node, 'Street') + self.Street = value_ + self.Street_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zip') + ival_ = self.gds_validate_integer(ival_, node, 'Zip') + self.Zip = ival_ + self.Zip_nsprefix_ = child_.prefix + elif nodeName_ == 'IsGuaranteed' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'IsGuaranteed') + ival_ = self.gds_validate_integer(ival_, node, 'IsGuaranteed') + self.IsGuaranteed = ival_ + self.IsGuaranteed_nsprefix_ = child_.prefix +# end class LocationType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ExpressMailCommitmentResponse' + rootClass = ExpressMailCommitmentResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from express_mail_commitment_response import *\n\n') + sys.stdout.write('import express_mail_commitment_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "CommitmentType", + "ExpressMailCommitmentResponse", + "LocationType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_request.py new file mode 100644 index 0000000000..426fbb906a --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_request.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:04 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/first_class_mail_request.py') +# +# Command line arguments: +# ./schemas/FirstClassMailRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/first_class_mail_request.py" ./schemas/FirstClassMailRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class FirstClassMailRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, DestinationType=None, PMGuarantee=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.DestinationType = DestinationType + self.DestinationType_nsprefix_ = None + self.PMGuarantee = PMGuarantee + self.PMGuarantee_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, FirstClassMailRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FirstClassMailRequest.subclass: + return FirstClassMailRequest.subclass(*args_, **kwargs_) + else: + return FirstClassMailRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_DestinationType(self): + return self.DestinationType + def set_DestinationType(self, DestinationType): + self.DestinationType = DestinationType + def get_PMGuarantee(self): + return self.PMGuarantee + def set_PMGuarantee(self, PMGuarantee): + self.PMGuarantee = PMGuarantee + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.DestinationType is not None or + self.PMGuarantee is not None or + self.ClientType is not None + ): + return True + else: + 
return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('FirstClassMailRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'FirstClassMailRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FirstClassMailRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FirstClassMailRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FirstClassMailRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.DestinationType is not None: + namespaceprefix_ = self.DestinationType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationType, input_name='DestinationType'), namespaceprefix_ , eol_)) + if self.PMGuarantee is not None: + namespaceprefix_ = self.PMGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.PMGuarantee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPMGuarantee>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PMGuarantee), input_name='PMGuarantee')), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationType') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationType') + self.DestinationType = ival_ + self.DestinationType_nsprefix_ = child_.prefix + elif nodeName_ == 'PMGuarantee': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PMGuarantee') + value_ = self.gds_validate_string(value_, node, 'PMGuarantee') + self.PMGuarantee = value_ + self.PMGuarantee_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class FirstClassMailRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailRequest' + rootClass = FirstClassMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from first_class_mail_request import *\n\n') + sys.stdout.write('import first_class_mail_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "FirstClassMailRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_response.py new file mode 100644 index 0000000000..7303020f54 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/first_class_mail_response.py @@ -0,0 +1,1396 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:04 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/first_class_mail_response.py') +# +# Command line arguments: +# ./schemas/FirstClassMailResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/first_class_mail_response.py" ./schemas/FirstClassMailResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class FirstClassMailResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZip=None, DestinationZip=None, Days=None, Message=None, EffectiveAcceptanceDate=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Days = Days + self.Days_nsprefix_ = None + self.Message = Message + self.Message_nsprefix_ = None + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, FirstClassMailResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FirstClassMailResponse.subclass: + return FirstClassMailResponse.subclass(*args_, **kwargs_) + else: + return FirstClassMailResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Days(self): + return self.Days + def set_Days(self, Days): + self.Days = Days + def get_Message(self): + return self.Message + def set_Message(self, Message): + self.Message = Message + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Days is not None or + self.Message is not None or + self.EffectiveAcceptanceDate is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailResponse', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('FirstClassMailResponse')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'FirstClassMailResponse':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FirstClassMailResponse')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FirstClassMailResponse', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FirstClassMailResponse'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FirstClassMailResponse', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.OriginZip is not None:
+            namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sOriginZip>%s</%sOriginZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_))
+        if self.DestinationZip is not None:
+            namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDestinationZip>%s</%sDestinationZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_))
+        if self.Days is not None:
+            namespaceprefix_ = self.Days_nsprefix_ + ':' if (UseCapturedNS_ and self.Days_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDays>%s</%sDays>%s' % (namespaceprefix_ , self.gds_format_integer(self.Days, input_name='Days'), namespaceprefix_ , eol_))
+        if self.Message is not None:
+            namespaceprefix_ = self.Message_nsprefix_ + ':' if (UseCapturedNS_ and self.Message_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMessage>%s</%sMessage>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Message), input_name='Message')), namespaceprefix_ , eol_))
+        if self.EffectiveAcceptanceDate is not None:
+            namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEffectiveAcceptanceDate>%s</%sEffectiveAcceptanceDate>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EffectiveAcceptanceDate), input_name='EffectiveAcceptanceDate')), namespaceprefix_ , eol_))
+        if self.ScheduledDeliveryDate is not None:
+            namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sScheduledDeliveryDate>%s</%sScheduledDeliveryDate>%s' % (namespaceprefix_ ,
self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Days' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Days') + ival_ = self.gds_validate_integer(ival_, node, 'Days') + self.Days = ival_ + self.Days_nsprefix_ = child_.prefix + elif nodeName_ == 'Message': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Message') + value_ = self.gds_validate_string(value_, node, 'Message') + self.Message = value_ + self.Message_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EffectiveAcceptanceDate') + value_ = self.gds_validate_string(value_, node, 'EffectiveAcceptanceDate') + self.EffectiveAcceptanceDate = value_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ScheduledDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate') + self.ScheduledDeliveryDate = value_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix +# end class FirstClassMailResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'FirstClassMailResponse' + rootClass = FirstClassMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from first_class_mail_response import *\n\n') + sys.stdout.write('import first_class_mail_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "FirstClassMailResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_request.py new file mode 100644 index 0000000000..9206bd3c85 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_request.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:04 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/hfp_facility_info_request.py') +# +# Command line arguments: +# ./schemas/HFPFacilityInfoRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/hfp_facility_info_request.py" ./schemas/HFPFacilityInfoRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
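# A minimal standalone sketch (not part of the generated module) of the
# UTC-offset suffix logic shared by the gds_format_datetime, gds_format_date
# and gds_format_time helpers above: a zero offset serializes as 'Z', any
# other offset as '+HH:MM' or '-HH:MM'. The helper name format_tz_suffix is
# hypothetical and used here only for illustration.
import datetime

def format_tz_suffix(value):
    """Return 'Z', '+HH:MM' or '-HH:MM' for an aware datetime, '' otherwise."""
    tzoff = value.utcoffset()
    if tzoff is None:
        return ''
    total_seconds = tzoff.seconds + (86400 * tzoff.days)
    if total_seconds == 0:
        return 'Z'
    sign = '-' if total_seconds < 0 else '+'
    total_seconds = abs(total_seconds)
    hours = total_seconds // 3600
    minutes = (total_seconds - hours * 3600) // 60
    return '{}{:02d}:{:02d}'.format(sign, hours, minutes)

# Example: an aware datetime at UTC-05:00 gets a '-05:00' suffix, while a
# UTC datetime gets the compact 'Z' marker and a naive one gets no suffix.
est = datetime.timezone(datetime.timedelta(hours=-5))
print(format_tz_suffix(datetime.datetime(2024, 4, 3, 12, 0, tzinfo=est)))                    # -05:00
print(format_tz_suffix(datetime.datetime(2024, 4, 3, 12, 0, tzinfo=datetime.timezone.utc)))  # Z
print(format_tz_suffix(datetime.datetime(2024, 4, 3, 12, 0)))                                # (empty)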
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class HFPFacilityInfoRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, PickupCity=None, PickupState=None, PickupZIP=None, PickupZIP4=None, Service=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.PickupCity = PickupCity + self.PickupCity_nsprefix_ = None + self.PickupState = PickupState + self.PickupState_nsprefix_ = None + self.PickupZIP = PickupZIP + self.PickupZIP_nsprefix_ = None + self.PickupZIP4 = PickupZIP4 + self.PickupZIP4_nsprefix_ = None + self.Service = Service + self.Service_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, HFPFacilityInfoRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if HFPFacilityInfoRequest.subclass: + return HFPFacilityInfoRequest.subclass(*args_, **kwargs_) + else: + return HFPFacilityInfoRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PickupCity(self): + return self.PickupCity + def set_PickupCity(self, PickupCity): + self.PickupCity = PickupCity + def get_PickupState(self): + return self.PickupState + def set_PickupState(self, PickupState): + self.PickupState = PickupState + def get_PickupZIP(self): + return self.PickupZIP + def set_PickupZIP(self, PickupZIP): + self.PickupZIP = PickupZIP + def get_PickupZIP4(self): + return self.PickupZIP4 + def set_PickupZIP4(self, PickupZIP4): + self.PickupZIP4 = PickupZIP4 + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.PickupCity is not None or + self.PickupState is not None or + self.PickupZIP is not None or + self.PickupZIP4 is not None or + self.Service is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoRequest', 
pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('HFPFacilityInfoRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'HFPFacilityInfoRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HFPFacilityInfoRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HFPFacilityInfoRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HFPFacilityInfoRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PickupCity is not None: + namespaceprefix_ = self.PickupCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupCity), input_name='PickupCity')), namespaceprefix_ , eol_)) + if self.PickupState is not None: + namespaceprefix_ = self.PickupState_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupState), input_name='PickupState')), namespaceprefix_ , eol_)) + if self.PickupZIP is not None: + namespaceprefix_ = self.PickupZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PickupZIP, input_name='PickupZIP'), namespaceprefix_ , eol_)) + if self.PickupZIP4 is not None: + namespaceprefix_ = self.PickupZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupZIP4), input_name='PickupZIP4')), namespaceprefix_ , eol_)) + if self.Service is not None: + namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sService>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Service), input_name='Service')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PickupCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupCity') + value_ = self.gds_validate_string(value_, node, 'PickupCity') + self.PickupCity = value_ + self.PickupCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupState') + value_ = self.gds_validate_string(value_, node, 'PickupState') + self.PickupState = value_ + self.PickupState_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PickupZIP') + ival_ = self.gds_validate_integer(ival_, node, 'PickupZIP') + self.PickupZIP = ival_ + self.PickupZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupZIP4') + value_ = self.gds_validate_string(value_, node, 'PickupZIP4') + self.PickupZIP4 = value_ + self.PickupZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Service': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Service') + value_ = self.gds_validate_string(value_, node, 'Service') + self.Service = value_ + self.Service_nsprefix_ = child_.prefix +# end class HFPFacilityInfoRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoRequest' + rootClass = HFPFacilityInfoRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from hfp_facility_info_request import *\n\n') + sys.stdout.write('import hfp_facility_info_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "HFPFacilityInfoRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_response.py new file mode 100644 index 0000000000..29c60cd9d6 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/hfp_facility_info_response.py @@ -0,0 +1,1601 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:05 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/hfp_facility_info_response.py') +# +# Command line arguments: +# ./schemas/HFPFacilityInfoResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/hfp_facility_info_response.py" ./schemas/HFPFacilityInfoResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
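# A minimal standalone sketch (not part of the generated module) of the check
# performed by gds_validate_simple_patterns above: the target value must be
# matched in full by at least one alternative in every group of patterns.
# The helper name and the example ZIP patterns below are illustrative only.
import re

def matches_simple_patterns(patterns, target):
    target = str(target)
    for group in patterns:
        for pattern in group:
            mo = re.search(pattern, target)
            if mo is not None and len(mo.group(0)) == len(target):
                break            # this group is satisfied by a full match
        else:
            return False         # no alternative in this group matched fully
    return True

# Example: one group with two alternatives accepting ZIP or ZIP+4 strings.
zip_patterns = [[r'\d{5}', r'\d{5}-\d{4}']]
print(matches_simple_patterns(zip_patterns, '20260'))        # True
print(matches_simple_patterns(zip_patterns, '20260-0004'))   # True
print(matches_simple_patterns(zip_patterns, '2026'))         # False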
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class HFPFacilityInfoResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PickupCity=None, PickupState=None, PickupZIP=None, PickupZIP4=None, Facility=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.PickupCity = PickupCity + self.PickupCity_nsprefix_ = None + self.PickupState = PickupState + self.PickupState_nsprefix_ = None + self.PickupZIP = PickupZIP + self.PickupZIP_nsprefix_ = None + self.PickupZIP4 = PickupZIP4 + self.PickupZIP4_nsprefix_ = None + if Facility is None: + self.Facility = [] + else: + self.Facility = Facility + self.Facility_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, HFPFacilityInfoResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if HFPFacilityInfoResponse.subclass: + return HFPFacilityInfoResponse.subclass(*args_, **kwargs_) + else: + return HFPFacilityInfoResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PickupCity(self): + return self.PickupCity + def set_PickupCity(self, PickupCity): + self.PickupCity = PickupCity + def get_PickupState(self): + return self.PickupState + def set_PickupState(self, PickupState): + self.PickupState = PickupState + def get_PickupZIP(self): + return self.PickupZIP + def set_PickupZIP(self, PickupZIP): + self.PickupZIP = PickupZIP + def get_PickupZIP4(self): + return self.PickupZIP4 + def set_PickupZIP4(self, PickupZIP4): + self.PickupZIP4 = PickupZIP4 + def get_Facility(self): + return self.Facility + def set_Facility(self, Facility): + self.Facility = Facility + def add_Facility(self, value): + self.Facility.append(value) + def insert_Facility_at(self, index, value): + self.Facility.insert(index, value) + def replace_Facility_at(self, index, value): + self.Facility[index] = value + def has__content(self): + if ( + self.PickupCity is not None or + self.PickupState is not None or + self.PickupZIP is not None or + self.PickupZIP4 is not None or + self.Facility + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('HFPFacilityInfoResponse') + if 
imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'HFPFacilityInfoResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HFPFacilityInfoResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HFPFacilityInfoResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HFPFacilityInfoResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPFacilityInfoResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PickupCity is not None: + namespaceprefix_ = self.PickupCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupCity), input_name='PickupCity')), namespaceprefix_ , eol_)) + if self.PickupState is not None: + namespaceprefix_ = self.PickupState_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupState), input_name='PickupState')), namespaceprefix_ , eol_)) + if self.PickupZIP is not None: + namespaceprefix_ = self.PickupZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PickupZIP, input_name='PickupZIP'), namespaceprefix_ , eol_)) + if self.PickupZIP4 is not None: + namespaceprefix_ = self.PickupZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.PickupZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPickupZIP4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PickupZIP4), input_name='PickupZIP4')), namespaceprefix_ , eol_)) + for Facility_ in self.Facility: + namespaceprefix_ = self.Facility_nsprefix_ + ':' if (UseCapturedNS_ and self.Facility_nsprefix_) else '' + Facility_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Facility', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, 
gds_collector_=None): + if nodeName_ == 'PickupCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupCity') + value_ = self.gds_validate_string(value_, node, 'PickupCity') + self.PickupCity = value_ + self.PickupCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupState') + value_ = self.gds_validate_string(value_, node, 'PickupState') + self.PickupState = value_ + self.PickupState_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PickupZIP') + ival_ = self.gds_validate_integer(ival_, node, 'PickupZIP') + self.PickupZIP = ival_ + self.PickupZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'PickupZIP4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PickupZIP4') + value_ = self.gds_validate_string(value_, node, 'PickupZIP4') + self.PickupZIP4 = value_ + self.PickupZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Facility': + obj_ = FacilityType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Facility.append(obj_) + obj_.original_tagname_ = 'Facility' +# end class HFPFacilityInfoResponse + + +class FacilityType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FacilityID=None, FacilityName=None, FacilityAddress=None, FacilityCity=None, FacilityState=None, FacilityZIP=None, FacilityZIP4=None, Has10amCommitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FacilityID = FacilityID + self.FacilityID_nsprefix_ = None + self.FacilityName = FacilityName + self.FacilityName_nsprefix_ = None + self.FacilityAddress = FacilityAddress + self.FacilityAddress_nsprefix_ = None + self.FacilityCity = FacilityCity + self.FacilityCity_nsprefix_ = None + self.FacilityState = FacilityState + self.FacilityState_nsprefix_ = None + self.FacilityZIP = FacilityZIP + self.FacilityZIP_nsprefix_ = None + self.FacilityZIP4 = FacilityZIP4 + self.FacilityZIP4_nsprefix_ = None + self.Has10amCommitment = Has10amCommitment + self.Has10amCommitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, FacilityType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if FacilityType.subclass: + return FacilityType.subclass(*args_, **kwargs_) + else: + return FacilityType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FacilityID(self): + return self.FacilityID + def set_FacilityID(self, FacilityID): + self.FacilityID = FacilityID + def get_FacilityName(self): + return self.FacilityName + def set_FacilityName(self, FacilityName): + self.FacilityName = FacilityName + def get_FacilityAddress(self): + return self.FacilityAddress + def set_FacilityAddress(self, FacilityAddress): + self.FacilityAddress = FacilityAddress + def get_FacilityCity(self): + return self.FacilityCity + def set_FacilityCity(self, FacilityCity): + self.FacilityCity = FacilityCity + def get_FacilityState(self): + return self.FacilityState + def set_FacilityState(self, FacilityState): + 
self.FacilityState = FacilityState + def get_FacilityZIP(self): + return self.FacilityZIP + def set_FacilityZIP(self, FacilityZIP): + self.FacilityZIP = FacilityZIP + def get_FacilityZIP4(self): + return self.FacilityZIP4 + def set_FacilityZIP4(self, FacilityZIP4): + self.FacilityZIP4 = FacilityZIP4 + def get_Has10amCommitment(self): + return self.Has10amCommitment + def set_Has10amCommitment(self, Has10amCommitment): + self.Has10amCommitment = Has10amCommitment + def has__content(self): + if ( + self.FacilityID is not None or + self.FacilityName is not None or + self.FacilityAddress is not None or + self.FacilityCity is not None or + self.FacilityState is not None or + self.FacilityZIP is not None or + self.FacilityZIP4 is not None or + self.Has10amCommitment is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FacilityType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('FacilityType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'FacilityType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FacilityType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FacilityType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FacilityType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FacilityType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FacilityID is not None: + namespaceprefix_ = self.FacilityID_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.FacilityID, input_name='FacilityID'), namespaceprefix_ , eol_)) + if self.FacilityName is not None: + namespaceprefix_ = self.FacilityName_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityName), input_name='FacilityName')), namespaceprefix_ , eol_)) + if self.FacilityAddress is not None: + namespaceprefix_ = self.FacilityAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityAddress), input_name='FacilityAddress')), namespaceprefix_ , eol_)) + if self.FacilityCity is not None: + namespaceprefix_ = self.FacilityCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sFacilityCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityCity), input_name='FacilityCity')), namespaceprefix_ , eol_)) + if self.FacilityState is not None: + namespaceprefix_ = self.FacilityState_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacilityState), input_name='FacilityState')), namespaceprefix_ , eol_)) + if self.FacilityZIP is not None: + namespaceprefix_ = self.FacilityZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.FacilityZIP, input_name='FacilityZIP'), namespaceprefix_ , eol_)) + if self.FacilityZIP4 is not None: + namespaceprefix_ = self.FacilityZIP4_nsprefix_ + ':' if (UseCapturedNS_ and self.FacilityZIP4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacilityZIP4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.FacilityZIP4, input_name='FacilityZIP4'), namespaceprefix_ , eol_)) + if self.Has10amCommitment is not None: + namespaceprefix_ = self.Has10amCommitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Has10amCommitment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHas10amCommitment>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Has10amCommitment), input_name='Has10amCommitment')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FacilityID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FacilityID') + ival_ = self.gds_validate_integer(ival_, node, 'FacilityID') + self.FacilityID = ival_ + self.FacilityID_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityName') + value_ = self.gds_validate_string(value_, node, 'FacilityName') + self.FacilityName = value_ + self.FacilityName_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityAddress') + value_ = self.gds_validate_string(value_, node, 'FacilityAddress') + self.FacilityAddress = value_ + self.FacilityAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityCity') + value_ = self.gds_validate_string(value_, node, 'FacilityCity') + self.FacilityCity = value_ + self.FacilityCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacilityState') + value_ = self.gds_validate_string(value_, node, 'FacilityState') + 
self.FacilityState = value_ + self.FacilityState_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FacilityZIP') + ival_ = self.gds_validate_integer(ival_, node, 'FacilityZIP') + self.FacilityZIP = ival_ + self.FacilityZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'FacilityZIP4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FacilityZIP4') + ival_ = self.gds_validate_integer(ival_, node, 'FacilityZIP4') + self.FacilityZIP4 = ival_ + self.FacilityZIP4_nsprefix_ = child_.prefix + elif nodeName_ == 'Has10amCommitment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Has10amCommitment') + value_ = self.gds_validate_string(value_, node, 'Has10amCommitment') + self.Has10amCommitment = value_ + self.Has10amCommitment_nsprefix_ = child_.prefix +# end class FacilityType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + 
reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'HFPFacilityInfoResponse' + rootClass = HFPFacilityInfoResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from hfp_facility_info_response import *\n\n') + sys.stdout.write('import hfp_facility_info_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "FacilityType", + "HFPFacilityInfoResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_request.py new file mode 100644 index 0000000000..fba850a53c --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_request.py @@ -0,0 +1,2150 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:05 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/intl_rate_v2_request.py') +# +# Command line arguments: +# ./schemas/IntlRateV2Request.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/intl_rate_v2_request.py" ./schemas/IntlRateV2Request.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class IntlRateV2Request(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Revision=None, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, IntlRateV2Request) + if subclass is not None: + return subclass(*args_, **kwargs_) + if IntlRateV2Request.subclass: + return IntlRateV2Request.subclass(*args_, **kwargs_) + else: + return IntlRateV2Request(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Revision is not None or + self.Package + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Request', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntlRateV2Request') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'IntlRateV2Request': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + 
namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntlRateV2Request') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntlRateV2Request', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntlRateV2Request'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Request', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' +# end class IntlRateV2Request + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Pounds=None, Ounces=None, Machinable=True, MailType=None, GXG=None, ValueOfContents=None, Country=None, Container=None, Size=None, 
Width=None, Length=None, Height=None, Girth=None, OriginZip=None, CommercialFlag=None, CommercialPlusFlag=None, ExtraServices=None, AcceptanceDateTime=None, DestinationPostalCode=None, Content=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Pounds = Pounds + self.validate_PoundsType(self.Pounds) + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.validate_OuncesType(self.Ounces) + self.Ounces_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.MailType = MailType + self.MailType_nsprefix_ = None + self.GXG = GXG + self.GXG_nsprefix_ = None + self.ValueOfContents = ValueOfContents + self.ValueOfContents_nsprefix_ = None + self.Country = Country + self.Country_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Size = Size + self.Size_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.CommercialFlag = CommercialFlag + self.CommercialFlag_nsprefix_ = None + self.CommercialPlusFlag = CommercialPlusFlag + self.CommercialPlusFlag_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.AcceptanceDateTime = AcceptanceDateTime + self.AcceptanceDateTime_nsprefix_ = None + self.DestinationPostalCode = DestinationPostalCode + self.DestinationPostalCode_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, Ounces): + self.Ounces = Ounces + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_MailType(self): + return self.MailType + def set_MailType(self, MailType): + self.MailType = MailType + def get_GXG(self): + return self.GXG + def set_GXG(self, GXG): + self.GXG = GXG + def get_ValueOfContents(self): + return self.ValueOfContents + def set_ValueOfContents(self, ValueOfContents): + self.ValueOfContents = ValueOfContents + def get_Country(self): + return self.Country + def set_Country(self, Country): + self.Country = Country + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Size(self): + return self.Size + def set_Size(self, Size): + self.Size = Size + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return 
self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_CommercialFlag(self): + return self.CommercialFlag + def set_CommercialFlag(self, CommercialFlag): + self.CommercialFlag = CommercialFlag + def get_CommercialPlusFlag(self): + return self.CommercialPlusFlag + def set_CommercialPlusFlag(self, CommercialPlusFlag): + self.CommercialPlusFlag = CommercialPlusFlag + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_AcceptanceDateTime(self): + return self.AcceptanceDateTime + def set_AcceptanceDateTime(self, AcceptanceDateTime): + self.AcceptanceDateTime = AcceptanceDateTime + def get_DestinationPostalCode(self): + return self.DestinationPostalCode + def set_DestinationPostalCode(self, DestinationPostalCode): + self.DestinationPostalCode = DestinationPostalCode + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def validate_PoundsType(self, value): + result = True + # Validate type PoundsType, a restriction on xs:integer. + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False + if value < 0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + if value > 70: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + return result + def validate_OuncesType(self, value): + result = True + # Validate type OuncesType, a restriction on xs:decimal. 
+ if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, decimal_.Decimal): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (decimal_.Decimal)' % {"value": value, "lineno": lineno, }) + return False + if value < 0.0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on OuncesType' % {"value": value, "lineno": lineno} ) + result = False + return result + def has__content(self): + if ( + self.Pounds is not None or + self.Ounces is not None or + not self.Machinable or + self.MailType is not None or + self.GXG is not None or + self.ValueOfContents is not None or + self.Country is not None or + self.Container is not None or + self.Size is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.OriginZip is not None or + self.CommercialFlag is not None or + self.CommercialPlusFlag is not None or + self.ExtraServices is not None or + self.AcceptanceDateTime is not None or + self.DestinationPostalCode is not None or + self.Content is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Pounds, input_name='Pounds'), namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if not 
self.Machinable: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, input_name='Machinable'), namespaceprefix_ , eol_)) + if self.MailType is not None: + namespaceprefix_ = self.MailType_nsprefix_ + ':' if (UseCapturedNS_ and self.MailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailType), input_name='MailType')), namespaceprefix_ , eol_)) + if self.GXG is not None: + namespaceprefix_ = self.GXG_nsprefix_ + ':' if (UseCapturedNS_ and self.GXG_nsprefix_) else '' + self.GXG.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GXG', pretty_print=pretty_print) + if self.ValueOfContents is not None: + namespaceprefix_ = self.ValueOfContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ValueOfContents_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValueOfContents>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValueOfContents), input_name='ValueOfContents')), namespaceprefix_ , eol_)) + if self.Country is not None: + namespaceprefix_ = self.Country_nsprefix_ + ':' if (UseCapturedNS_ and self.Country_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Country), input_name='Country')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Size is not None: + namespaceprefix_ = self.Size_nsprefix_ + ':' if (UseCapturedNS_ and self.Size_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSize>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Size), input_name='Size')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, 
input_name='Girth'), namespaceprefix_ , eol_)) + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginZip), input_name='OriginZip')), namespaceprefix_ , eol_)) + if self.CommercialFlag is not None: + namespaceprefix_ = self.CommercialFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommercialFlag), input_name='CommercialFlag')), namespaceprefix_ , eol_)) + if self.CommercialPlusFlag is not None: + namespaceprefix_ = self.CommercialPlusFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPlusFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPlusFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommercialPlusFlag), input_name='CommercialPlusFlag')), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.AcceptanceDateTime is not None: + namespaceprefix_ = self.AcceptanceDateTime_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptanceDateTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptanceDateTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AcceptanceDateTime), input_name='AcceptanceDateTime')), namespaceprefix_ , eol_)) + if self.DestinationPostalCode is not None: + namespaceprefix_ = self.DestinationPostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationPostalCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationPostalCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationPostalCode), input_name='DestinationPostalCode')), namespaceprefix_ , eol_)) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Pounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Pounds') + ival_ = self.gds_validate_integer(ival_, node, 'Pounds') + self.Pounds = ival_ + 
self.Pounds_nsprefix_ = child_.prefix + # validate type PoundsType + self.validate_PoundsType(self.Pounds) + elif nodeName_ == 'Ounces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Ounces') + fval_ = self.gds_validate_decimal(fval_, node, 'Ounces') + self.Ounces = fval_ + self.Ounces_nsprefix_ = child_.prefix + # validate type OuncesType + self.validate_OuncesType(self.Ounces) + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'MailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailType') + value_ = self.gds_validate_string(value_, node, 'MailType') + self.MailType = value_ + self.MailType_nsprefix_ = child_.prefix + elif nodeName_ == 'GXG': + obj_ = GXGType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.GXG = obj_ + obj_.original_tagname_ = 'GXG' + elif nodeName_ == 'ValueOfContents': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ValueOfContents') + value_ = self.gds_validate_string(value_, node, 'ValueOfContents') + self.ValueOfContents = value_ + self.ValueOfContents_nsprefix_ = child_.prefix + elif nodeName_ == 'Country': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Country') + value_ = self.gds_validate_string(value_, node, 'Country') + self.Country = value_ + self.Country_nsprefix_ = child_.prefix + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Size': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Size') + value_ = self.gds_validate_string(value_, node, 'Size') + self.Size = value_ + self.Size_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZip': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginZip') + value_ = self.gds_validate_string(value_, node, 'OriginZip') + self.OriginZip = value_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommercialFlag') + value_ = self.gds_validate_string(value_, node, 'CommercialFlag') + self.CommercialFlag = value_ + self.CommercialFlag_nsprefix_ = 
child_.prefix + elif nodeName_ == 'CommercialPlusFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommercialPlusFlag') + value_ = self.gds_validate_string(value_, node, 'CommercialPlusFlag') + self.CommercialPlusFlag = value_ + self.CommercialPlusFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'AcceptanceDateTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AcceptanceDateTime') + value_ = self.gds_validate_string(value_, node, 'AcceptanceDateTime') + self.AcceptanceDateTime = value_ + self.AcceptanceDateTime_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationPostalCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationPostalCode') + value_ = self.gds_validate_string(value_, node, 'DestinationPostalCode') + self.DestinationPostalCode = value_ + self.DestinationPostalCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + obj_.original_tagname_ = 'Content' +# end class PackageType + + +class GXGType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, POBoxFlag=None, GiftFlag=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.POBoxFlag = POBoxFlag + self.POBoxFlag_nsprefix_ = None + self.GiftFlag = GiftFlag + self.GiftFlag_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, GXGType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if GXGType.subclass: + return GXGType.subclass(*args_, **kwargs_) + else: + return GXGType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_POBoxFlag(self): + return self.POBoxFlag + def set_POBoxFlag(self, POBoxFlag): + self.POBoxFlag = POBoxFlag + def get_GiftFlag(self): + return self.GiftFlag + def set_GiftFlag(self, GiftFlag): + self.GiftFlag = GiftFlag + def has__content(self): + if ( + self.POBoxFlag is not None or + self.GiftFlag is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('GXGType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'GXGType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GXGType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='GXGType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GXGType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.POBoxFlag is not None: + namespaceprefix_ = self.POBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.POBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POBoxFlag), input_name='POBoxFlag')), namespaceprefix_ , eol_)) + if self.GiftFlag is not None: + namespaceprefix_ = self.GiftFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.GiftFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGiftFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GiftFlag), input_name='GiftFlag')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'POBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POBoxFlag') + value_ = self.gds_validate_string(value_, node, 'POBoxFlag') + self.POBoxFlag = value_ + self.POBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'GiftFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GiftFlag') + value_ = self.gds_validate_string(value_, node, 'GiftFlag') + self.GiftFlag = value_ + self.GiftFlag_nsprefix_ = child_.prefix +# end class GXGType + + +class ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, 
value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExtraService>%s%s' % (namespaceprefix_ , self.gds_format_integer(ExtraService_, input_name='ExtraService'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ExtraService') + ival_ = self.gds_validate_integer(ival_, node, 'ExtraService') + self.ExtraService.append(ival_) + self.ExtraService_nsprefix_ = child_.prefix +# end class ExtraServicesType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + 
self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, 
nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ContentType':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ContentType')
+            value_ = self.gds_validate_string(value_, node, 'ContentType')
+            self.ContentType = value_
+            self.ContentType_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ContentDescription':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ContentDescription')
+            value_ = self.gds_validate_string(value_, node, 'ContentDescription')
+            self.ContentDescription = value_
+            self.ContentDescription_nsprefix_ = child_.prefix
+# end class ContentType
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+    '''
+    nsmap = {
+        prefix: uri
+        for node in rootNode.iter()
+        for (prefix, uri) in node.nsmap.items()
+        if prefix is not None
+    }
+    namespacedefs = ' '.join([
+        'xmlns:{}="{}"'.format(prefix, uri)
+        for prefix, uri in nsmap.items()
+    ])
+    return nsmap, namespacedefs
+
+
+def parse(inFileName, silence=False, print_warnings=True):
+    global CapturedNsmap_
+    gds_collector = GdsCollector_()
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'IntlRateV2Request'
+        rootClass = IntlRateV2Request
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode)
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(
+            sys.stdout, 0, name_=rootTag,
+            namespacedef_=namespacedefs,
+            pretty_print=True)
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def parseEtree(inFileName, silence=False, print_warnings=True,
+               mapping=None, reverse_mapping=None, nsmap=None):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    gds_collector = GdsCollector_()
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'IntlRateV2Request'
+        rootClass = IntlRateV2Request
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    if mapping is None:
+        mapping = {}
+    if reverse_mapping is None:
+        reverse_mapping = {}
+    rootElement = rootObj.to_etree(
+        None, name_=rootTag, mapping_=mapping,
+        reverse_mapping_=reverse_mapping, nsmap_=nsmap)
+    reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping)
+    # Enable Python to collect the space used by the DOM.
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Request' + rootClass = IntlRateV2Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Request' + rootClass = IntlRateV2Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from intl_rate_v2_request import *\n\n') + sys.stdout.write('import intl_rate_v2_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "ExtraServicesType", + "GXGType", + "IntlRateV2Request", + "PackageType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_response.py new file mode 100644 index 0000000000..30ae3b9fb7 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/intl_rate_v2_response.py @@ -0,0 +1,2877 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:05 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/intl_rate_v2_response.py') +# +# Command line arguments: +# ./schemas/IntlRateV2Response.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/intl_rate_v2_response.py" ./schemas/IntlRateV2Response.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
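+        # When XML support is enabled, the wildcard (xs:any) node is simply
+        # serialized back to unicode text via etree_.tostring(), which is
+        # what the assignment below does.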
+        content = ""
+        content = etree_.tostring(node, encoding="unicode")
+        return content
+    @classmethod
+    def gds_reverse_node_mapping(cls, mapping):
+        return dict(((v, k) for k, v in mapping.items()))
+    @staticmethod
+    def gds_encode(instring):
+        if sys.version_info.major == 2:
+            if ExternalEncoding:
+                encoding = ExternalEncoding
+            else:
+                encoding = 'utf-8'
+            return instring.encode(encoding)
+        else:
+            return instring
+    @staticmethod
+    def convert_unicode(instring):
+        if isinstance(instring, str):
+            result = quote_xml(instring)
+        elif sys.version_info.major == 2 and isinstance(instring, unicode):
+            result = quote_xml(instring).encode('utf8')
+        else:
+            result = GeneratedsSuper.gds_encode(str(instring))
+        return result
+    def __eq__(self, other):
+        def excl_select_objs_(obj):
+            return (obj[0] != 'parent_object_' and
+                    obj[0] != 'gds_collector_')
+        if type(self) != type(other):
+            return False
+        return all(x == y for x, y in zip_longest(
+            filter(excl_select_objs_, self.__dict__.items()),
+            filter(excl_select_objs_, other.__dict__.items())))
+    def __ne__(self, other):
+        return not self.__eq__(other)
+    # Django ETL transform hooks.
+    def gds_djo_etl_transform(self):
+        pass
+    def gds_djo_etl_transform_db_obj(self, dbobj):
+        pass
+    # SQLAlchemy ETL transform hooks.
+    def gds_sqa_etl_transform(self):
+        return 0, None
+    def gds_sqa_etl_transform_db_obj(self, dbobj):
+        pass
+    def gds_get_node_lineno_(self):
+        if (hasattr(self, "gds_elementtree_node_") and
+                self.gds_elementtree_node_ is not None):
+            return ' near line {}'.format(
+                self.gds_elementtree_node_.sourceline)
+        else:
+            return ""
+
+
+def getSubclassFromModule_(module, class_):
+    '''Get the subclass of a class from a specific module.'''
+    name = class_.__name__ + 'Sub'
+    if hasattr(module, name):
+        return getattr(module, name)
+    else:
+        return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
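+    # CDATA sections matched by CDATA_pattern_ are copied through verbatim;
+    # only the text between them is run through quote_xml_aux() for entity
+    # escaping. Illustrative example:
+    #   quote_xml('AT&T <intl>')  ->  'AT&amp;T &lt;intl&gt;'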
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
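+            # self.value can be whitespace-only filler between child
+            # elements; such runs are skipped so the serialized output does
+            # not accumulate blank lines. Simple and complex members are
+            # handled by the branches below via exportSimple() and the child
+            # object's own export().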
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class IntlRateV2Response(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, IntlRateV2Response) + if subclass is not None: + return subclass(*args_, **kwargs_) + if IntlRateV2Response.subclass: + return IntlRateV2Response.subclass(*args_, **kwargs_) + else: + return IntlRateV2Response(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def has__content(self): + if ( + self.Package + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Response', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntlRateV2Response') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'IntlRateV2Response': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntlRateV2Response') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntlRateV2Response', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='IntlRateV2Response'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntlRateV2Response', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' +# end class IntlRateV2Response + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Prohibitions=None, Restrictions=None, Observations=None, CustomsForms=None, ExpressMail=None, AreasServed=None, AdditionalRestrictions=None, Content=None, Service=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Prohibitions = Prohibitions + self.Prohibitions_nsprefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + self.Observations = Observations + self.Observations_nsprefix_ = None + self.CustomsForms = CustomsForms + self.CustomsForms_nsprefix_ = None + self.ExpressMail = ExpressMail + self.ExpressMail_nsprefix_ = None + self.AreasServed = AreasServed + self.AreasServed_nsprefix_ = None + self.AdditionalRestrictions = AdditionalRestrictions + self.AdditionalRestrictions_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + if Service is None: + self.Service = [] + else: + self.Service = Service + self.Service_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Prohibitions(self): + return self.Prohibitions + def set_Prohibitions(self, Prohibitions): + self.Prohibitions = Prohibitions + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def get_Observations(self): + return self.Observations + def set_Observations(self, Observations): + self.Observations = Observations + def get_CustomsForms(self): + return 
self.CustomsForms + def set_CustomsForms(self, CustomsForms): + self.CustomsForms = CustomsForms + def get_ExpressMail(self): + return self.ExpressMail + def set_ExpressMail(self, ExpressMail): + self.ExpressMail = ExpressMail + def get_AreasServed(self): + return self.AreasServed + def set_AreasServed(self, AreasServed): + self.AreasServed = AreasServed + def get_AdditionalRestrictions(self): + return self.AdditionalRestrictions + def set_AdditionalRestrictions(self, AdditionalRestrictions): + self.AdditionalRestrictions = AdditionalRestrictions + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def add_Service(self, value): + self.Service.append(value) + def insert_Service_at(self, index, value): + self.Service.insert(index, value) + def replace_Service_at(self, index, value): + self.Service[index] = value + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Prohibitions is not None or + self.Restrictions is not None or + self.Observations is not None or + self.CustomsForms is not None or + self.ExpressMail is not None or + self.AreasServed is not None or + self.AdditionalRestrictions is not None or + self.Content is not None or + self.Service + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Prohibitions is not None: + namespaceprefix_ = self.Prohibitions_nsprefix_ + ':' if (UseCapturedNS_ and self.Prohibitions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sProhibitions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Prohibitions), input_name='Prohibitions')), namespaceprefix_ , eol_)) + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else 
'' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + if self.Observations is not None: + namespaceprefix_ = self.Observations_nsprefix_ + ':' if (UseCapturedNS_ and self.Observations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sObservations>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Observations), input_name='Observations')), namespaceprefix_ , eol_)) + if self.CustomsForms is not None: + namespaceprefix_ = self.CustomsForms_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomsForms_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomsForms>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomsForms), input_name='CustomsForms')), namespaceprefix_ , eol_)) + if self.ExpressMail is not None: + namespaceprefix_ = self.ExpressMail_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpressMail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExpressMail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExpressMail), input_name='ExpressMail')), namespaceprefix_ , eol_)) + if self.AreasServed is not None: + namespaceprefix_ = self.AreasServed_nsprefix_ + ':' if (UseCapturedNS_ and self.AreasServed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAreasServed>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AreasServed), input_name='AreasServed')), namespaceprefix_ , eol_)) + if self.AdditionalRestrictions is not None: + namespaceprefix_ = self.AdditionalRestrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalRestrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalRestrictions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalRestrictions), input_name='AdditionalRestrictions')), namespaceprefix_ , eol_)) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + for Service_ in self.Service: + namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else '' + Service_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Service', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Prohibitions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Prohibitions') + value_ = 
self.gds_validate_string(value_, node, 'Prohibitions') + self.Prohibitions = value_ + self.Prohibitions_nsprefix_ = child_.prefix + elif nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Observations': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Observations') + value_ = self.gds_validate_string(value_, node, 'Observations') + self.Observations = value_ + self.Observations_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomsForms': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomsForms') + value_ = self.gds_validate_string(value_, node, 'CustomsForms') + self.CustomsForms = value_ + self.CustomsForms_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpressMail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExpressMail') + value_ = self.gds_validate_string(value_, node, 'ExpressMail') + self.ExpressMail = value_ + self.ExpressMail_nsprefix_ = child_.prefix + elif nodeName_ == 'AreasServed': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AreasServed') + value_ = self.gds_validate_string(value_, node, 'AreasServed') + self.AreasServed = value_ + self.AreasServed_nsprefix_ = child_.prefix + elif nodeName_ == 'AdditionalRestrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalRestrictions') + value_ = self.gds_validate_string(value_, node, 'AdditionalRestrictions') + self.AdditionalRestrictions = value_ + self.AdditionalRestrictions_nsprefix_ = child_.prefix + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + obj_.original_tagname_ = 'Content' + elif nodeName_ == 'Service': + obj_ = ServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Service.append(obj_) + obj_.original_tagname_ = 'Service' +# end class PackageType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def 
has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentDescription') + value_ = self.gds_validate_string(value_, node, 'ContentDescription') + self.ContentDescription = value_ + self.ContentDescription_nsprefix_ = child_.prefix +# end class ContentType + + +class ServiceType(GeneratedsSuper): + 
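+    # Generated binding for one <Service> entry of an IntlRateV2 <Package>:
+    # it carries the rated postage fields (Postage, CommercialPostage,
+    # CommercialPlusPostage), the package characteristics echoed back in the
+    # response (Pounds, Ounces, dimensions, MailType, Country) and the
+    # nested GXG, ExtraServices and GXGLocations structures.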
__hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Pounds=None, Ounces=None, Machinable=None, MailType=None, GXG=None, Container=None, Width=None, Length=None, Height=None, Girth=None, Country=None, Postage=None, CommercialPostage=None, CommercialPlusPostage=None, ExtraServices=None, ValueOfContents=None, InsComment=None, ParcelIndemnityCoverage=None, SvcCommitments=None, SvcDescription=None, MaxDimensions=None, MaxWeight=None, GuaranteeAvailability=None, GXGLocations=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Pounds = Pounds + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.Ounces_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.MailType = MailType + self.MailType_nsprefix_ = None + self.GXG = GXG + self.GXG_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Country = Country + self.Country_nsprefix_ = None + self.Postage = Postage + self.Postage_nsprefix_ = None + self.CommercialPostage = CommercialPostage + self.CommercialPostage_nsprefix_ = None + self.CommercialPlusPostage = CommercialPlusPostage + self.CommercialPlusPostage_nsprefix_ = None + self.ExtraServices = ExtraServices + self.ExtraServices_nsprefix_ = None + self.ValueOfContents = ValueOfContents + self.ValueOfContents_nsprefix_ = None + self.InsComment = InsComment + self.InsComment_nsprefix_ = None + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + self.ParcelIndemnityCoverage_nsprefix_ = None + self.SvcCommitments = SvcCommitments + self.SvcCommitments_nsprefix_ = None + self.SvcDescription = SvcDescription + self.SvcDescription_nsprefix_ = None + self.MaxDimensions = MaxDimensions + self.MaxDimensions_nsprefix_ = None + self.MaxWeight = MaxWeight + self.MaxWeight_nsprefix_ = None + self.GuaranteeAvailability = GuaranteeAvailability + self.GuaranteeAvailability_nsprefix_ = None + self.GXGLocations = GXGLocations + self.GXGLocations_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ServiceType.subclass: + return ServiceType.subclass(*args_, **kwargs_) + else: + return ServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, Ounces): + self.Ounces = Ounces + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_MailType(self): + return self.MailType + def set_MailType(self, MailType): + self.MailType = MailType + def get_GXG(self): + return self.GXG + def set_GXG(self, GXG): + self.GXG = GXG + def get_Container(self): + return self.Container + def set_Container(self, Container): + 
self.Container = Container + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Country(self): + return self.Country + def set_Country(self, Country): + self.Country = Country + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def get_CommercialPostage(self): + return self.CommercialPostage + def set_CommercialPostage(self, CommercialPostage): + self.CommercialPostage = CommercialPostage + def get_CommercialPlusPostage(self): + return self.CommercialPlusPostage + def set_CommercialPlusPostage(self, CommercialPlusPostage): + self.CommercialPlusPostage = CommercialPlusPostage + def get_ExtraServices(self): + return self.ExtraServices + def set_ExtraServices(self, ExtraServices): + self.ExtraServices = ExtraServices + def get_ValueOfContents(self): + return self.ValueOfContents + def set_ValueOfContents(self, ValueOfContents): + self.ValueOfContents = ValueOfContents + def get_InsComment(self): + return self.InsComment + def set_InsComment(self, InsComment): + self.InsComment = InsComment + def get_ParcelIndemnityCoverage(self): + return self.ParcelIndemnityCoverage + def set_ParcelIndemnityCoverage(self, ParcelIndemnityCoverage): + self.ParcelIndemnityCoverage = ParcelIndemnityCoverage + def get_SvcCommitments(self): + return self.SvcCommitments + def set_SvcCommitments(self, SvcCommitments): + self.SvcCommitments = SvcCommitments + def get_SvcDescription(self): + return self.SvcDescription + def set_SvcDescription(self, SvcDescription): + self.SvcDescription = SvcDescription + def get_MaxDimensions(self): + return self.MaxDimensions + def set_MaxDimensions(self, MaxDimensions): + self.MaxDimensions = MaxDimensions + def get_MaxWeight(self): + return self.MaxWeight + def set_MaxWeight(self, MaxWeight): + self.MaxWeight = MaxWeight + def get_GuaranteeAvailability(self): + return self.GuaranteeAvailability + def set_GuaranteeAvailability(self, GuaranteeAvailability): + self.GuaranteeAvailability = GuaranteeAvailability + def get_GXGLocations(self): + return self.GXGLocations + def set_GXGLocations(self, GXGLocations): + self.GXGLocations = GXGLocations + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Pounds is not None or + self.Ounces is not None or + self.Machinable is not None or + self.MailType is not None or + self.GXG is not None or + self.Container is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Country is not None or + self.Postage is not None or + self.CommercialPostage is not None or + self.CommercialPlusPostage is not None or + self.ExtraServices is not None or + self.ValueOfContents is not None or + self.InsComment is not None or + self.ParcelIndemnityCoverage is not None or + self.SvcCommitments is not None or + self.SvcDescription is not None or + self.MaxDimensions is not None or + self.MaxWeight is not None or + self.GuaranteeAvailability is not None or + self.GXGLocations is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceType', 
pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ServiceType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Pounds, input_name='Pounds'), namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Machinable), input_name='Machinable')), namespaceprefix_ , eol_)) + if self.MailType is not None: + namespaceprefix_ = self.MailType_nsprefix_ + ':' if (UseCapturedNS_ and self.MailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailType), input_name='MailType')), namespaceprefix_ , eol_)) + if self.GXG is not None: + namespaceprefix_ = self.GXG_nsprefix_ + ':' if (UseCapturedNS_ and self.GXG_nsprefix_) else '' + self.GXG.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GXG', pretty_print=pretty_print) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Country is not None: + namespaceprefix_ = self.Country_nsprefix_ + ':' if (UseCapturedNS_ and self.Country_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Country), input_name='Country')), namespaceprefix_ , eol_)) + if self.Postage is not None: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Postage, input_name='Postage'), namespaceprefix_ , eol_)) + if self.CommercialPostage is not None: + namespaceprefix_ = self.CommercialPostage_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPostage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialPostage, input_name='CommercialPostage'), namespaceprefix_ , eol_)) + if self.CommercialPlusPostage is not None: + namespaceprefix_ = self.CommercialPlusPostage_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPlusPostage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPlusPostage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialPlusPostage, input_name='CommercialPlusPostage'), namespaceprefix_ , eol_)) + if self.ExtraServices is not None: + namespaceprefix_ = self.ExtraServices_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraServices_nsprefix_) else '' + self.ExtraServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraServices', pretty_print=pretty_print) + if self.ValueOfContents is not None: + namespaceprefix_ = self.ValueOfContents_nsprefix_ + ':' if (UseCapturedNS_ and self.ValueOfContents_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValueOfContents>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValueOfContents, input_name='ValueOfContents'), namespaceprefix_ , eol_)) + if self.InsComment is not None: + namespaceprefix_ = self.InsComment_nsprefix_ + ':' if (UseCapturedNS_ and self.InsComment_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsComment>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsComment), 
input_name='InsComment')), namespaceprefix_ , eol_)) + if self.ParcelIndemnityCoverage is not None: + namespaceprefix_ = self.ParcelIndemnityCoverage_nsprefix_ + ':' if (UseCapturedNS_ and self.ParcelIndemnityCoverage_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sParcelIndemnityCoverage>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ParcelIndemnityCoverage, input_name='ParcelIndemnityCoverage'), namespaceprefix_ , eol_)) + if self.SvcCommitments is not None: + namespaceprefix_ = self.SvcCommitments_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcCommitments_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcCommitments>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcCommitments), input_name='SvcCommitments')), namespaceprefix_ , eol_)) + if self.SvcDescription is not None: + namespaceprefix_ = self.SvcDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcDescription), input_name='SvcDescription')), namespaceprefix_ , eol_)) + if self.MaxDimensions is not None: + namespaceprefix_ = self.MaxDimensions_nsprefix_ + ':' if (UseCapturedNS_ and self.MaxDimensions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMaxDimensions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MaxDimensions), input_name='MaxDimensions')), namespaceprefix_ , eol_)) + if self.MaxWeight is not None: + namespaceprefix_ = self.MaxWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.MaxWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMaxWeight>%s%s' % (namespaceprefix_ , self.gds_format_float(self.MaxWeight, input_name='MaxWeight'), namespaceprefix_ , eol_)) + if self.GuaranteeAvailability is not None: + namespaceprefix_ = self.GuaranteeAvailability_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteeAvailability_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteeAvailability>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteeAvailability), input_name='GuaranteeAvailability')), namespaceprefix_ , eol_)) + if self.GXGLocations is not None: + namespaceprefix_ = self.GXGLocations_nsprefix_ + ':' if (UseCapturedNS_ and self.GXGLocations_nsprefix_) else '' + self.GXGLocations.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GXGLocations', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Pounds' and child_.text: + sval_ = child_.text + fval_ = 
self.gds_parse_float(sval_, node, 'Pounds') + fval_ = self.gds_validate_float(fval_, node, 'Pounds') + self.Pounds = fval_ + self.Pounds_nsprefix_ = child_.prefix + elif nodeName_ == 'Ounces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Ounces') + fval_ = self.gds_validate_float(fval_, node, 'Ounces') + self.Ounces = fval_ + self.Ounces_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Machinable') + value_ = self.gds_validate_string(value_, node, 'Machinable') + self.Machinable = value_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'MailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailType') + value_ = self.gds_validate_string(value_, node, 'MailType') + self.MailType = value_ + self.MailType_nsprefix_ = child_.prefix + elif nodeName_ == 'GXG': + obj_ = GXGType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.GXG = obj_ + obj_.original_tagname_ = 'GXG' + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Width') + fval_ = self.gds_validate_float(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Length') + fval_ = self.gds_validate_float(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Height') + fval_ = self.gds_validate_float(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Girth') + fval_ = self.gds_validate_float(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Country': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Country') + value_ = self.gds_validate_string(value_, node, 'Country') + self.Country = value_ + self.Country_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Postage') + fval_ = self.gds_validate_float(fval_, node, 'Postage') + self.Postage = fval_ + self.Postage_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialPostage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialPostage') + fval_ = self.gds_validate_float(fval_, node, 'CommercialPostage') + self.CommercialPostage = fval_ + self.CommercialPostage_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialPlusPostage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialPlusPostage') + fval_ = self.gds_validate_float(fval_, node, 'CommercialPlusPostage') + self.CommercialPlusPostage = fval_ + self.CommercialPlusPostage_nsprefix_ = child_.prefix + elif nodeName_ == 'ExtraServices': + obj_ = ExtraServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraServices = obj_ + 
obj_.original_tagname_ = 'ExtraServices' + elif nodeName_ == 'ValueOfContents' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'ValueOfContents') + fval_ = self.gds_validate_float(fval_, node, 'ValueOfContents') + self.ValueOfContents = fval_ + self.ValueOfContents_nsprefix_ = child_.prefix + elif nodeName_ == 'InsComment': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsComment') + value_ = self.gds_validate_string(value_, node, 'InsComment') + self.InsComment = value_ + self.InsComment_nsprefix_ = child_.prefix + elif nodeName_ == 'ParcelIndemnityCoverage' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'ParcelIndemnityCoverage') + fval_ = self.gds_validate_float(fval_, node, 'ParcelIndemnityCoverage') + self.ParcelIndemnityCoverage = fval_ + self.ParcelIndemnityCoverage_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcCommitments': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SvcCommitments') + value_ = self.gds_validate_string(value_, node, 'SvcCommitments') + self.SvcCommitments = value_ + self.SvcCommitments_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SvcDescription') + value_ = self.gds_validate_string(value_, node, 'SvcDescription') + self.SvcDescription = value_ + self.SvcDescription_nsprefix_ = child_.prefix + elif nodeName_ == 'MaxDimensions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MaxDimensions') + value_ = self.gds_validate_string(value_, node, 'MaxDimensions') + self.MaxDimensions = value_ + self.MaxDimensions_nsprefix_ = child_.prefix + elif nodeName_ == 'MaxWeight' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'MaxWeight') + fval_ = self.gds_validate_float(fval_, node, 'MaxWeight') + self.MaxWeight = fval_ + self.MaxWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteeAvailability': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteeAvailability') + value_ = self.gds_validate_string(value_, node, 'GuaranteeAvailability') + self.GuaranteeAvailability = value_ + self.GuaranteeAvailability_nsprefix_ = child_.prefix + elif nodeName_ == 'GXGLocations': + obj_ = GXGLocationsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.GXGLocations = obj_ + obj_.original_tagname_ = 'GXGLocations' +# end class ServiceType + + +class GXGType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, POBoxFlag=None, GiftFlag=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.POBoxFlag = POBoxFlag + self.POBoxFlag_nsprefix_ = None + self.GiftFlag = GiftFlag + self.GiftFlag_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, GXGType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if GXGType.subclass: + return GXGType.subclass(*args_, **kwargs_) + else: + return GXGType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_POBoxFlag(self): + return 
self.POBoxFlag + def set_POBoxFlag(self, POBoxFlag): + self.POBoxFlag = POBoxFlag + def get_GiftFlag(self): + return self.GiftFlag + def set_GiftFlag(self, GiftFlag): + self.GiftFlag = GiftFlag + def has__content(self): + if ( + self.POBoxFlag is not None or + self.GiftFlag is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('GXGType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'GXGType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GXGType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GXGType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GXGType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.POBoxFlag is not None: + namespaceprefix_ = self.POBoxFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.POBoxFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPOBoxFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.POBoxFlag), input_name='POBoxFlag')), namespaceprefix_ , eol_)) + if self.GiftFlag is not None: + namespaceprefix_ = self.GiftFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.GiftFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGiftFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GiftFlag), input_name='GiftFlag')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'POBoxFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'POBoxFlag') + value_ = self.gds_validate_string(value_, node, 'POBoxFlag') + self.POBoxFlag = value_ + self.POBoxFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'GiftFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GiftFlag') + value_ = self.gds_validate_string(value_, node, 'GiftFlag') + self.GiftFlag = value_ + self.GiftFlag_nsprefix_ = child_.prefix +# end class GXGType + + +class 
ExtraServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ExtraService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if ExtraService is None: + self.ExtraService = [] + else: + self.ExtraService = ExtraService + self.ExtraService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServicesType.subclass: + return ExtraServicesType.subclass(*args_, **kwargs_) + else: + return ExtraServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ExtraService(self): + return self.ExtraService + def set_ExtraService(self, ExtraService): + self.ExtraService = ExtraService + def add_ExtraService(self, value): + self.ExtraService.append(value) + def insert_ExtraService_at(self, index, value): + self.ExtraService.insert(index, value) + def replace_ExtraService_at(self, index, value): + self.ExtraService[index] = value + def has__content(self): + if ( + self.ExtraService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ExtraService_ in self.ExtraService: + namespaceprefix_ = self.ExtraService_nsprefix_ + ':' if (UseCapturedNS_ and self.ExtraService_nsprefix_) else '' + ExtraService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExtraService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ExtraService': + obj_ = ExtraServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ExtraService.append(obj_) + obj_.original_tagname_ = 'ExtraService' +# end class ExtraServicesType + + +class ExtraServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Available=None, OnlineAvailable=None, Price=None, OnlinePrice=None, DeclaredValueRequired=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Available = Available + self.Available_nsprefix_ = None + self.OnlineAvailable = OnlineAvailable + self.OnlineAvailable_nsprefix_ = None + self.Price = Price + self.Price_nsprefix_ = None + self.OnlinePrice = OnlinePrice + self.OnlinePrice_nsprefix_ = None + self.DeclaredValueRequired = DeclaredValueRequired + self.DeclaredValueRequired_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExtraServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExtraServiceType.subclass: + return ExtraServiceType.subclass(*args_, **kwargs_) + else: + return ExtraServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Available(self): + return self.Available + def set_Available(self, Available): + self.Available = Available + def get_OnlineAvailable(self): + return self.OnlineAvailable + def set_OnlineAvailable(self, OnlineAvailable): + self.OnlineAvailable = OnlineAvailable + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def get_OnlinePrice(self): + return self.OnlinePrice + def set_OnlinePrice(self, OnlinePrice): + self.OnlinePrice = OnlinePrice + def get_DeclaredValueRequired(self): + return self.DeclaredValueRequired + def set_DeclaredValueRequired(self, DeclaredValueRequired): + self.DeclaredValueRequired = DeclaredValueRequired + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Available is not None or + self.OnlineAvailable is not None or + self.Price is not None or + self.OnlinePrice is not None or + self.DeclaredValueRequired is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtraServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ 
+ if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExtraServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtraServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtraServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtraServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtraServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceID), input_name='ServiceID')), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Available is not None: + namespaceprefix_ = self.Available_nsprefix_ + ':' if (UseCapturedNS_ and self.Available_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Available), input_name='Available')), namespaceprefix_ , eol_)) + if self.OnlineAvailable is not None: + namespaceprefix_ = self.OnlineAvailable_nsprefix_ + ':' if (UseCapturedNS_ and self.OnlineAvailable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOnlineAvailable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OnlineAvailable), input_name='OnlineAvailable')), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + if self.OnlinePrice is not None: + namespaceprefix_ = self.OnlinePrice_nsprefix_ + ':' if (UseCapturedNS_ and self.OnlinePrice_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOnlinePrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.OnlinePrice, input_name='OnlinePrice'), namespaceprefix_ , eol_)) + if self.DeclaredValueRequired is not None: + namespaceprefix_ = self.DeclaredValueRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.DeclaredValueRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sDeclaredValueRequired>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeclaredValueRequired), input_name='DeclaredValueRequired')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceID') + value_ = self.gds_validate_string(value_, node, 'ServiceID') + self.ServiceID = value_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Available': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Available') + value_ = self.gds_validate_string(value_, node, 'Available') + self.Available = value_ + self.Available_nsprefix_ = child_.prefix + elif nodeName_ == 'OnlineAvailable': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OnlineAvailable') + value_ = self.gds_validate_string(value_, node, 'OnlineAvailable') + self.OnlineAvailable = value_ + self.OnlineAvailable_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix + elif nodeName_ == 'OnlinePrice' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'OnlinePrice') + fval_ = self.gds_validate_float(fval_, node, 'OnlinePrice') + self.OnlinePrice = fval_ + self.OnlinePrice_nsprefix_ = child_.prefix + elif nodeName_ == 'DeclaredValueRequired': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeclaredValueRequired') + value_ = self.gds_validate_string(value_, node, 'DeclaredValueRequired') + self.DeclaredValueRequired = value_ + self.DeclaredValueRequired_nsprefix_ = child_.prefix +# end class ExtraServiceType + + +class GXGLocationsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PostOffice=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.PostOffice = PostOffice + self.PostOffice_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, GXGLocationsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if GXGLocationsType.subclass: + return GXGLocationsType.subclass(*args_, **kwargs_) + else: + return GXGLocationsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + 
return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PostOffice(self): + return self.PostOffice + def set_PostOffice(self, PostOffice): + self.PostOffice = PostOffice + def has__content(self): + if ( + self.PostOffice is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGLocationsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('GXGLocationsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'GXGLocationsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GXGLocationsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GXGLocationsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GXGLocationsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GXGLocationsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PostOffice is not None: + namespaceprefix_ = self.PostOffice_nsprefix_ + ':' if (UseCapturedNS_ and self.PostOffice_nsprefix_) else '' + self.PostOffice.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PostOffice', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PostOffice': + obj_ = PostOfficeType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.PostOffice = obj_ + obj_.original_tagname_ = 'PostOffice' +# end class GXGLocationsType + + +class PostOfficeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Name=None, Address=None, City=None, State=None, ZipCode=None, RetailGXGCutOffTime=None, SaturDayCutOffTime=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Name = Name + self.Name_nsprefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZipCode = 
ZipCode + self.ZipCode_nsprefix_ = None + self.RetailGXGCutOffTime = RetailGXGCutOffTime + self.RetailGXGCutOffTime_nsprefix_ = None + self.SaturDayCutOffTime = SaturDayCutOffTime + self.SaturDayCutOffTime_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PostOfficeType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PostOfficeType.subclass: + return PostOfficeType.subclass(*args_, **kwargs_) + else: + return PostOfficeType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Name(self): + return self.Name + def set_Name(self, Name): + self.Name = Name + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_ZipCode(self): + return self.ZipCode + def set_ZipCode(self, ZipCode): + self.ZipCode = ZipCode + def get_RetailGXGCutOffTime(self): + return self.RetailGXGCutOffTime + def set_RetailGXGCutOffTime(self, RetailGXGCutOffTime): + self.RetailGXGCutOffTime = RetailGXGCutOffTime + def get_SaturDayCutOffTime(self): + return self.SaturDayCutOffTime + def set_SaturDayCutOffTime(self, SaturDayCutOffTime): + self.SaturDayCutOffTime = SaturDayCutOffTime + def has__content(self): + if ( + self.Name is not None or + self.Address is not None or + self.City is not None or + self.State is not None or + self.ZipCode is not None or + self.RetailGXGCutOffTime is not None or + self.SaturDayCutOffTime is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostOfficeType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PostOfficeType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PostOfficeType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PostOfficeType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PostOfficeType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PostOfficeType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostOfficeType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Name is not None: + namespaceprefix_ = self.Name_nsprefix_ + ':' if (UseCapturedNS_ and self.Name_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sName>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')), namespaceprefix_ , eol_)) + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address), input_name='Address')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZipCode is not None: + namespaceprefix_ = self.ZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipCode), input_name='ZipCode')), namespaceprefix_ , eol_)) + if self.RetailGXGCutOffTime is not None: + namespaceprefix_ = self.RetailGXGCutOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailGXGCutOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailGXGCutOffTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailGXGCutOffTime), input_name='RetailGXGCutOffTime')), namespaceprefix_ , eol_)) + if self.SaturDayCutOffTime is not None: + namespaceprefix_ = self.SaturDayCutOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.SaturDayCutOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSaturDayCutOffTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SaturDayCutOffTime), input_name='SaturDayCutOffTime')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Name': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Name') + value_ = self.gds_validate_string(value_, node, 'Name') + self.Name = value_ + self.Name_nsprefix_ = child_.prefix + elif nodeName_ == 'Address': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address') + value_ = self.gds_validate_string(value_, node, 'Address') + self.Address = value_ + self.Address_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = 
value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZipCode') + value_ = self.gds_validate_string(value_, node, 'ZipCode') + self.ZipCode = value_ + self.ZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailGXGCutOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailGXGCutOffTime') + value_ = self.gds_validate_string(value_, node, 'RetailGXGCutOffTime') + self.RetailGXGCutOffTime = value_ + self.RetailGXGCutOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'SaturDayCutOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SaturDayCutOffTime') + value_ = self.gds_validate_string(value_, node, 'SaturDayCutOffTime') + self.SaturDayCutOffTime = value_ + self.SaturDayCutOffTime_nsprefix_ = child_.prefix +# end class PostOfficeType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if 
reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'IntlRateV2Response' + rootClass = IntlRateV2Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from intl_rate_v2_response import *\n\n') + sys.stdout.write('import intl_rate_v2_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "ExtraServiceType", + "ExtraServicesType", + "GXGLocationsType", + "GXGType", + "IntlRateV2Response", + "PackageType", + "PostOfficeType", + "ServiceType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/mrsv4_0_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/mrsv4_0_request.py new file mode 100644 index 0000000000..8caefd06ac --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/mrsv4_0_request.py @@ -0,0 +1,1828 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:05 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/mrsv4_0_request.py') +# +# Command line arguments: +# ./schemas/MRSV4.0Request.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/mrsv4_0_request.py" ./schemas/MRSV4.0Request.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
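The comment blocks above describe optional override modules (generatedsnamespaces, generatedscollector, generatedssuper) that this generated module tries to import before falling back to the built-in definitions that continue below. As a minimal sketch of that hook, and not part of this patch, the following hypothetical generatedscollector.py swaps the warning-accumulating GdsCollector shown above for a fail-fast variant; it relies only on the collector interface the generated code actually calls (add_message, get_messages, clear_messages, print_messages, write_messages).

# File: generatedscollector.py -- hypothetical override module (illustrative only)
class GdsCollector(object):
    def __init__(self, messages=None):
        # keep the same constructor signature as the generated fallback
        self.messages = [] if messages is None else messages
    def add_message(self, msg):
        # raise on the first schema warning instead of collecting it silently
        raise ValueError("schema warning: {}".format(msg))
    def get_messages(self):
        return self.messages
    def clear_messages(self):
        self.messages = []
    def print_messages(self):
        for msg in self.messages:
            print("Warning: {}".format(msg))
    def write_messages(self, outstream):
        for msg in self.messages:
            outstream.write("Warning: {}\n".format(msg))

The generated fallback for GeneratedsSuper, which the preceding comment refers to, follows.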
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class MRSV4_0Request(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, CustomerName=None, CustomerAddress1=None, CustomerAddress2=None, CustomerCity=None, CustomerState=None, CustomerZip5=None, CustomerZip4=None, RetailerName=None, RetailerAddress=None, PermitNumber=None, PermitIssuingPOCity=None, PermitIssuingPOState=None, PermitIssuingPOZip5=None, PDUFirmName=None, PDUPOBox=None, PDUCity=None, PDUState=None, PDUZip5=None, PDUZip4=None, ServiceType=None, DeliveryConfirmation=None, InsuranceValue=None, MailingAckPackageID=None, WeightInPounds=None, WeightInOunces=None, RMA=None, RMAPICFlag=None, ImageType=None, RMABarcode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.CustomerName = CustomerName + self.CustomerName_nsprefix_ = None + self.CustomerAddress1 = CustomerAddress1 + self.CustomerAddress1_nsprefix_ = None + self.CustomerAddress2 = CustomerAddress2 + self.CustomerAddress2_nsprefix_ = None + self.CustomerCity = CustomerCity + self.CustomerCity_nsprefix_ = None + self.CustomerState = CustomerState + self.CustomerState_nsprefix_ = None + self.CustomerZip5 = CustomerZip5 + self.CustomerZip5_nsprefix_ = None + self.CustomerZip4 = CustomerZip4 + self.CustomerZip4_nsprefix_ = None + self.RetailerName = RetailerName + self.RetailerName_nsprefix_ = None + self.RetailerAddress = RetailerAddress + self.RetailerAddress_nsprefix_ = None + self.PermitNumber = PermitNumber + self.PermitNumber_nsprefix_ = None + self.PermitIssuingPOCity = PermitIssuingPOCity + self.PermitIssuingPOCity_nsprefix_ = None + self.PermitIssuingPOState = PermitIssuingPOState + self.PermitIssuingPOState_nsprefix_ = None + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + self.PermitIssuingPOZip5_nsprefix_ = None + self.PDUFirmName = PDUFirmName + self.PDUFirmName_nsprefix_ = None + self.PDUPOBox = PDUPOBox + self.PDUPOBox_nsprefix_ = None + self.PDUCity = PDUCity + self.PDUCity_nsprefix_ = None + self.PDUState = PDUState + self.PDUState_nsprefix_ = None + self.PDUZip5 = PDUZip5 + self.PDUZip5_nsprefix_ = None + self.PDUZip4 = PDUZip4 + self.PDUZip4_nsprefix_ = None + self.ServiceType = ServiceType + self.ServiceType_nsprefix_ = None + 
self.DeliveryConfirmation = DeliveryConfirmation + self.DeliveryConfirmation_nsprefix_ = None + self.InsuranceValue = InsuranceValue + self.InsuranceValue_nsprefix_ = None + self.MailingAckPackageID = MailingAckPackageID + self.MailingAckPackageID_nsprefix_ = None + self.WeightInPounds = WeightInPounds + self.WeightInPounds_nsprefix_ = None + self.WeightInOunces = WeightInOunces + self.WeightInOunces_nsprefix_ = None + self.RMA = RMA + self.RMA_nsprefix_ = None + self.RMAPICFlag = RMAPICFlag + self.RMAPICFlag_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.RMABarcode = RMABarcode + self.RMABarcode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, MRSV4_0Request) + if subclass is not None: + return subclass(*args_, **kwargs_) + if MRSV4_0Request.subclass: + return MRSV4_0Request.subclass(*args_, **kwargs_) + else: + return MRSV4_0Request(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_CustomerName(self): + return self.CustomerName + def set_CustomerName(self, CustomerName): + self.CustomerName = CustomerName + def get_CustomerAddress1(self): + return self.CustomerAddress1 + def set_CustomerAddress1(self, CustomerAddress1): + self.CustomerAddress1 = CustomerAddress1 + def get_CustomerAddress2(self): + return self.CustomerAddress2 + def set_CustomerAddress2(self, CustomerAddress2): + self.CustomerAddress2 = CustomerAddress2 + def get_CustomerCity(self): + return self.CustomerCity + def set_CustomerCity(self, CustomerCity): + self.CustomerCity = CustomerCity + def get_CustomerState(self): + return self.CustomerState + def set_CustomerState(self, CustomerState): + self.CustomerState = CustomerState + def get_CustomerZip5(self): + return self.CustomerZip5 + def set_CustomerZip5(self, CustomerZip5): + self.CustomerZip5 = CustomerZip5 + def get_CustomerZip4(self): + return self.CustomerZip4 + def set_CustomerZip4(self, CustomerZip4): + self.CustomerZip4 = CustomerZip4 + def get_RetailerName(self): + return self.RetailerName + def set_RetailerName(self, RetailerName): + self.RetailerName = RetailerName + def get_RetailerAddress(self): + return self.RetailerAddress + def set_RetailerAddress(self, RetailerAddress): + self.RetailerAddress = RetailerAddress + def get_PermitNumber(self): + return self.PermitNumber + def set_PermitNumber(self, PermitNumber): + self.PermitNumber = PermitNumber + def get_PermitIssuingPOCity(self): + return self.PermitIssuingPOCity + def set_PermitIssuingPOCity(self, PermitIssuingPOCity): + self.PermitIssuingPOCity = PermitIssuingPOCity + def get_PermitIssuingPOState(self): + return self.PermitIssuingPOState + def set_PermitIssuingPOState(self, PermitIssuingPOState): + self.PermitIssuingPOState = PermitIssuingPOState + def get_PermitIssuingPOZip5(self): + return self.PermitIssuingPOZip5 + def set_PermitIssuingPOZip5(self, PermitIssuingPOZip5): + self.PermitIssuingPOZip5 = PermitIssuingPOZip5 + def get_PDUFirmName(self): + return self.PDUFirmName + def set_PDUFirmName(self, PDUFirmName): + self.PDUFirmName = PDUFirmName + def get_PDUPOBox(self): + return self.PDUPOBox + def set_PDUPOBox(self, PDUPOBox): + self.PDUPOBox = PDUPOBox + def get_PDUCity(self): + return self.PDUCity + def set_PDUCity(self, 
PDUCity): + self.PDUCity = PDUCity + def get_PDUState(self): + return self.PDUState + def set_PDUState(self, PDUState): + self.PDUState = PDUState + def get_PDUZip5(self): + return self.PDUZip5 + def set_PDUZip5(self, PDUZip5): + self.PDUZip5 = PDUZip5 + def get_PDUZip4(self): + return self.PDUZip4 + def set_PDUZip4(self, PDUZip4): + self.PDUZip4 = PDUZip4 + def get_ServiceType(self): + return self.ServiceType + def set_ServiceType(self, ServiceType): + self.ServiceType = ServiceType + def get_DeliveryConfirmation(self): + return self.DeliveryConfirmation + def set_DeliveryConfirmation(self, DeliveryConfirmation): + self.DeliveryConfirmation = DeliveryConfirmation + def get_InsuranceValue(self): + return self.InsuranceValue + def set_InsuranceValue(self, InsuranceValue): + self.InsuranceValue = InsuranceValue + def get_MailingAckPackageID(self): + return self.MailingAckPackageID + def set_MailingAckPackageID(self, MailingAckPackageID): + self.MailingAckPackageID = MailingAckPackageID + def get_WeightInPounds(self): + return self.WeightInPounds + def set_WeightInPounds(self, WeightInPounds): + self.WeightInPounds = WeightInPounds + def get_WeightInOunces(self): + return self.WeightInOunces + def set_WeightInOunces(self, WeightInOunces): + self.WeightInOunces = WeightInOunces + def get_RMA(self): + return self.RMA + def set_RMA(self, RMA): + self.RMA = RMA + def get_RMAPICFlag(self): + return self.RMAPICFlag + def set_RMAPICFlag(self, RMAPICFlag): + self.RMAPICFlag = RMAPICFlag + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_RMABarcode(self): + return self.RMABarcode + def set_RMABarcode(self, RMABarcode): + self.RMABarcode = RMABarcode + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.CustomerName is not None or + self.CustomerAddress1 is not None or + self.CustomerAddress2 is not None or + self.CustomerCity is not None or + self.CustomerState is not None or + self.CustomerZip5 is not None or + self.CustomerZip4 is not None or + self.RetailerName is not None or + self.RetailerAddress is not None or + self.PermitNumber is not None or + self.PermitIssuingPOCity is not None or + self.PermitIssuingPOState is not None or + self.PermitIssuingPOZip5 is not None or + self.PDUFirmName is not None or + self.PDUPOBox is not None or + self.PDUCity is not None or + self.PDUState is not None or + self.PDUZip5 is not None or + self.PDUZip4 is not None or + self.ServiceType is not None or + self.DeliveryConfirmation is not None or + self.InsuranceValue is not None or + self.MailingAckPackageID is not None or + self.WeightInPounds is not None or + self.WeightInOunces is not None or + self.RMA is not None or + self.RMAPICFlag is not None or + self.ImageType is not None or + self.RMABarcode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MRSV4.0Request', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('MRSV4.0Request') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'MRSV4.0Request': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + 
namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MRSV4.0Request') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MRSV4.0Request', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MRSV4.0Request'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MRSV4.0Request', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOption>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Option), input_name='Option')), namespaceprefix_ , eol_)) + if self.CustomerName is not None: + namespaceprefix_ = self.CustomerName_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerName), input_name='CustomerName')), namespaceprefix_ , eol_)) + if self.CustomerAddress1 is not None: + namespaceprefix_ = self.CustomerAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerAddress1), input_name='CustomerAddress1')), namespaceprefix_ , eol_)) + if self.CustomerAddress2 is not None: + namespaceprefix_ = self.CustomerAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerAddress2), input_name='CustomerAddress2')), namespaceprefix_ , eol_)) + if self.CustomerCity is not None: + namespaceprefix_ = self.CustomerCity_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerCity), input_name='CustomerCity')), namespaceprefix_ , eol_)) + if self.CustomerState is not None: + namespaceprefix_ = self.CustomerState_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sCustomerState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerState), input_name='CustomerState')), namespaceprefix_ , eol_)) + if self.CustomerZip5 is not None: + namespaceprefix_ = self.CustomerZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CustomerZip5, input_name='CustomerZip5'), namespaceprefix_ , eol_)) + if self.CustomerZip4 is not None: + namespaceprefix_ = self.CustomerZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerZip4), input_name='CustomerZip4')), namespaceprefix_ , eol_)) + if self.RetailerName is not None: + namespaceprefix_ = self.RetailerName_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailerName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerName), input_name='RetailerName')), namespaceprefix_ , eol_)) + if self.RetailerAddress is not None: + namespaceprefix_ = self.RetailerAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.RetailerAddress_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetailerAddress>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetailerAddress), input_name='RetailerAddress')), namespaceprefix_ , eol_)) + if self.PermitNumber is not None: + namespaceprefix_ = self.PermitNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitNumber_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitNumber>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitNumber, input_name='PermitNumber'), namespaceprefix_ , eol_)) + if self.PermitIssuingPOCity is not None: + namespaceprefix_ = self.PermitIssuingPOCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOCity), input_name='PermitIssuingPOCity')), namespaceprefix_ , eol_)) + if self.PermitIssuingPOState is not None: + namespaceprefix_ = self.PermitIssuingPOState_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PermitIssuingPOState), input_name='PermitIssuingPOState')), namespaceprefix_ , eol_)) + if self.PermitIssuingPOZip5 is not None: + namespaceprefix_ = self.PermitIssuingPOZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PermitIssuingPOZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPermitIssuingPOZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PermitIssuingPOZip5, input_name='PermitIssuingPOZip5'), namespaceprefix_ , eol_)) + if self.PDUFirmName is not None: + namespaceprefix_ = self.PDUFirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUFirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUFirmName>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.PDUFirmName), input_name='PDUFirmName')), namespaceprefix_ , eol_)) + if self.PDUPOBox is not None: + namespaceprefix_ = self.PDUPOBox_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUPOBox_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUPOBox>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUPOBox), input_name='PDUPOBox')), namespaceprefix_ , eol_)) + if self.PDUCity is not None: + namespaceprefix_ = self.PDUCity_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUCity), input_name='PDUCity')), namespaceprefix_ , eol_)) + if self.PDUState is not None: + namespaceprefix_ = self.PDUState_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDUState), input_name='PDUState')), namespaceprefix_ , eol_)) + if self.PDUZip5 is not None: + namespaceprefix_ = self.PDUZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip5, input_name='PDUZip5'), namespaceprefix_ , eol_)) + if self.PDUZip4 is not None: + namespaceprefix_ = self.PDUZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.PDUZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDUZip4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.PDUZip4, input_name='PDUZip4'), namespaceprefix_ , eol_)) + if self.ServiceType is not None: + namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_)) + if self.DeliveryConfirmation is not None: + namespaceprefix_ = self.DeliveryConfirmation_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryConfirmation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryConfirmation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryConfirmation), input_name='DeliveryConfirmation')), namespaceprefix_ , eol_)) + if self.InsuranceValue is not None: + namespaceprefix_ = self.InsuranceValue_nsprefix_ + ':' if (UseCapturedNS_ and self.InsuranceValue_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sInsuranceValue>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InsuranceValue), input_name='InsuranceValue')), namespaceprefix_ , eol_)) + if self.MailingAckPackageID is not None: + namespaceprefix_ = self.MailingAckPackageID_nsprefix_ + ':' if (UseCapturedNS_ and self.MailingAckPackageID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailingAckPackageID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailingAckPackageID), input_name='MailingAckPackageID')), namespaceprefix_ , eol_)) + if self.WeightInPounds is not None: + namespaceprefix_ = self.WeightInPounds_nsprefix_ + ':' if (UseCapturedNS_ and 
self.WeightInPounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInPounds, input_name='WeightInPounds'), namespaceprefix_ , eol_)) + if self.WeightInOunces is not None: + namespaceprefix_ = self.WeightInOunces_nsprefix_ + ':' if (UseCapturedNS_ and self.WeightInOunces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWeightInOunces>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.WeightInOunces, input_name='WeightInOunces'), namespaceprefix_ , eol_)) + if self.RMA is not None: + namespaceprefix_ = self.RMA_nsprefix_ + ':' if (UseCapturedNS_ and self.RMA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMA>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMA), input_name='RMA')), namespaceprefix_ , eol_)) + if self.RMAPICFlag is not None: + namespaceprefix_ = self.RMAPICFlag_nsprefix_ + ':' if (UseCapturedNS_ and self.RMAPICFlag_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMAPICFlag>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMAPICFlag), input_name='RMAPICFlag')), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.RMABarcode is not None: + namespaceprefix_ = self.RMABarcode_nsprefix_ + ':' if (UseCapturedNS_ and self.RMABarcode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRMABarcode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RMABarcode), input_name='RMABarcode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Option') + value_ = self.gds_validate_string(value_, node, 'Option') + self.Option = value_ + self.Option_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerName') + value_ = self.gds_validate_string(value_, node, 'CustomerName') + self.CustomerName = value_ + self.CustomerName_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, 
node, 'CustomerAddress1') + value_ = self.gds_validate_string(value_, node, 'CustomerAddress1') + self.CustomerAddress1 = value_ + self.CustomerAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerAddress2') + value_ = self.gds_validate_string(value_, node, 'CustomerAddress2') + self.CustomerAddress2 = value_ + self.CustomerAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerCity') + value_ = self.gds_validate_string(value_, node, 'CustomerCity') + self.CustomerCity = value_ + self.CustomerCity_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerState') + value_ = self.gds_validate_string(value_, node, 'CustomerState') + self.CustomerState = value_ + self.CustomerState_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'CustomerZip5') + ival_ = self.gds_validate_integer(ival_, node, 'CustomerZip5') + self.CustomerZip5 = ival_ + self.CustomerZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerZip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerZip4') + value_ = self.gds_validate_string(value_, node, 'CustomerZip4') + self.CustomerZip4 = value_ + self.CustomerZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailerName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerName') + value_ = self.gds_validate_string(value_, node, 'RetailerName') + self.RetailerName = value_ + self.RetailerName_nsprefix_ = child_.prefix + elif nodeName_ == 'RetailerAddress': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RetailerAddress') + value_ = self.gds_validate_string(value_, node, 'RetailerAddress') + self.RetailerAddress = value_ + self.RetailerAddress_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitNumber' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitNumber') + ival_ = self.gds_validate_integer(ival_, node, 'PermitNumber') + self.PermitNumber = ival_ + self.PermitNumber_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOCity') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOCity') + self.PermitIssuingPOCity = value_ + self.PermitIssuingPOCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PermitIssuingPOState') + value_ = self.gds_validate_string(value_, node, 'PermitIssuingPOState') + self.PermitIssuingPOState = value_ + self.PermitIssuingPOState_nsprefix_ = child_.prefix + elif nodeName_ == 'PermitIssuingPOZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PermitIssuingPOZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PermitIssuingPOZip5') + self.PermitIssuingPOZip5 = ival_ + self.PermitIssuingPOZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUFirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUFirmName') + value_ = self.gds_validate_string(value_, node, 'PDUFirmName') + self.PDUFirmName = value_ + self.PDUFirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUPOBox': + 
value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUPOBox') + value_ = self.gds_validate_string(value_, node, 'PDUPOBox') + self.PDUPOBox = value_ + self.PDUPOBox_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUCity') + value_ = self.gds_validate_string(value_, node, 'PDUCity') + self.PDUCity = value_ + self.PDUCity_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDUState') + value_ = self.gds_validate_string(value_, node, 'PDUState') + self.PDUState = value_ + self.PDUState_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip5') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip5') + self.PDUZip5 = ival_ + self.PDUZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'PDUZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'PDUZip4') + ival_ = self.gds_validate_integer(ival_, node, 'PDUZip4') + self.PDUZip4 = ival_ + self.PDUZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceType') + value_ = self.gds_validate_string(value_, node, 'ServiceType') + self.ServiceType = value_ + self.ServiceType_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryConfirmation': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryConfirmation') + value_ = self.gds_validate_string(value_, node, 'DeliveryConfirmation') + self.DeliveryConfirmation = value_ + self.DeliveryConfirmation_nsprefix_ = child_.prefix + elif nodeName_ == 'InsuranceValue': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'InsuranceValue') + value_ = self.gds_validate_string(value_, node, 'InsuranceValue') + self.InsuranceValue = value_ + self.InsuranceValue_nsprefix_ = child_.prefix + elif nodeName_ == 'MailingAckPackageID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailingAckPackageID') + value_ = self.gds_validate_string(value_, node, 'MailingAckPackageID') + self.MailingAckPackageID = value_ + self.MailingAckPackageID_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInPounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInPounds') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInPounds') + self.WeightInPounds = ival_ + self.WeightInPounds_nsprefix_ = child_.prefix + elif nodeName_ == 'WeightInOunces' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'WeightInOunces') + ival_ = self.gds_validate_integer(ival_, node, 'WeightInOunces') + self.WeightInOunces = ival_ + self.WeightInOunces_nsprefix_ = child_.prefix + elif nodeName_ == 'RMA': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMA') + value_ = self.gds_validate_string(value_, node, 'RMA') + self.RMA = value_ + self.RMA_nsprefix_ = child_.prefix + elif nodeName_ == 'RMAPICFlag': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMAPICFlag') + value_ = self.gds_validate_string(value_, node, 'RMAPICFlag') + self.RMAPICFlag = value_ + self.RMAPICFlag_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + 
self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'RMABarcode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RMABarcode') + value_ = self.gds_validate_string(value_, node, 'RMABarcode') + self.RMABarcode = value_ + self.RMABarcode_nsprefix_ = child_.prefix +# end class MRSV4_0Request + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'MRSV4_0Request' + rootClass = MRSV4_0Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from mrsv4_0_request import *\n\n') + sys.stdout.write('import mrsv4_0_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "MRSV4_0Request" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_request.py new file mode 100644 index 0000000000..752ec4cc7e --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_request.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:07 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/priority_mail_request.py') +# +# Command line arguments: +# ./schemas/PriorityMailRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/priority_mail_request.py" ./schemas/PriorityMailRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
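+# A minimal replacement sketch (illustrative assumption only; a real
+# generatedssuper.py must provide every gds_* helper that the generated
+# classes call, not just the one shown here):
+#
+# # File: generatedssuper.py
+#
+# class GeneratedsSuper(object):
+#     def gds_format_string(self, input_data, input_name=''):
+#         # e.g. strip surrounding whitespace from every exported string
+#         return input_data.strip()
+#     # ... remaining gds_* formatting/parsing/validation helpers ...
+#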
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
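            # The outer lists are ANDed and the inner lists are ORed: the
            # check succeeds only if every inner list contains at least one
            # pattern that matches the whole target.  For illustration,
            # patterns=[[r'\d{5}', r'\d{9}'], [r'[0-9]+']] accepts '20770'
            # but rejects '2077A'.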
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
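            # The wildcard (xs:any) subtree is re-serialized to its literal
            # XML text below, so unmapped child elements survive a
            # parse/export round trip unchanged.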
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
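    # Only the text between CDATA sections (matched by CDATA_pattern_) is
    # escaped via quote_xml_aux(); the matched CDATA spans themselves are
    # copied through verbatim so their payload is not escaped twice.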
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PriorityMailRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, DestinationType=None, PMGuarantee=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.DestinationType = DestinationType + self.DestinationType_nsprefix_ = None + self.PMGuarantee = PMGuarantee + self.PMGuarantee_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PriorityMailRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PriorityMailRequest.subclass: + return PriorityMailRequest.subclass(*args_, **kwargs_) + else: + return PriorityMailRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_DestinationType(self): + return self.DestinationType + def set_DestinationType(self, DestinationType): + self.DestinationType = DestinationType + def get_PMGuarantee(self): + return self.PMGuarantee + def set_PMGuarantee(self, PMGuarantee): + self.PMGuarantee = PMGuarantee + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.DestinationType is not None or + self.PMGuarantee is not None or + self.ClientType is not None + ): + return True + else: + return False 
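# A minimal usage sketch for the generated request type above (illustrative,
# with placeholder field values). It relies only on what this module defines:
# the keyword constructor / factory() hook and the export() serializer that
# follows (str() works too, via GeneratedsSuper.__str__). The flat import
# name mirrors this module's own parseLiteral() output; adjust it to the
# packaged path used in your tree.
import sys

from priority_mail_request import PriorityMailRequest  # hypothetical flat import

request = PriorityMailRequest.factory(
    USERID="XXXXXXXX",      # placeholder credentials (exported as attributes)
    PASSWORD="XXXXXXXX",
    OriginZip=4861,         # integers are serialized via gds_format_integer
    DestinationZip=54914,
    DestinationType=1,
    PMGuarantee="N",        # illustrative value
    ClientType=1,
)
request.export(sys.stdout, 0, name_="PriorityMailRequest", pretty_print=True)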
+ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PriorityMailRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PriorityMailRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PriorityMailRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PriorityMailRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PriorityMailRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PriorityMailRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PriorityMailRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.DestinationType is not None: + namespaceprefix_ = self.DestinationType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationType, input_name='DestinationType'), namespaceprefix_ , eol_)) + if self.PMGuarantee is not None: + namespaceprefix_ = self.PMGuarantee_nsprefix_ + ':' if (UseCapturedNS_ and self.PMGuarantee_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPMGuarantee>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PMGuarantee), input_name='PMGuarantee')), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationType') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationType') + self.DestinationType = ival_ + self.DestinationType_nsprefix_ = child_.prefix + elif nodeName_ == 'PMGuarantee': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PMGuarantee') + value_ = self.gds_validate_string(value_, node, 'PMGuarantee') + self.PMGuarantee = value_ + self.PMGuarantee_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class PriorityMailRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailRequest' + rootClass = PriorityMailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from priority_mail_request import *\n\n') + sys.stdout.write('import priority_mail_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PriorityMailRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_response.py new file mode 100644 index 0000000000..b498d4d39b --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/priority_mail_response.py @@ -0,0 +1,1396 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:07 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/priority_mail_response.py') +# +# Command line arguments: +# ./schemas/PriorityMailResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/priority_mail_response.py" ./schemas/PriorityMailResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PriorityMailResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZip=None, DestinationZip=None, Days=None, Message=None, EffectiveAcceptanceDate=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Days = Days + self.Days_nsprefix_ = None + self.Message = Message + self.Message_nsprefix_ = None + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PriorityMailResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PriorityMailResponse.subclass: + return PriorityMailResponse.subclass(*args_, **kwargs_) + else: + return PriorityMailResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Days(self): + return self.Days + def set_Days(self, Days): + self.Days = Days + def get_Message(self): + return self.Message + def set_Message(self, Message): + self.Message = Message + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Days is not None or + self.Message is not None or + self.EffectiveAcceptanceDate is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='PriorityMailResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PriorityMailResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PriorityMailResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PriorityMailResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PriorityMailResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PriorityMailResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PriorityMailResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.Days is not None: + namespaceprefix_ = self.Days_nsprefix_ + ':' if (UseCapturedNS_ and self.Days_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Days, input_name='Days'), namespaceprefix_ , eol_)) + if self.Message is not None: + namespaceprefix_ = self.Message_nsprefix_ + ':' if (UseCapturedNS_ and self.Message_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMessage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Message), input_name='Message')), namespaceprefix_ , eol_)) + if self.EffectiveAcceptanceDate is not None: + namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEffectiveAcceptanceDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EffectiveAcceptanceDate), input_name='EffectiveAcceptanceDate')), namespaceprefix_ , eol_)) + if self.ScheduledDeliveryDate is not None: + namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sScheduledDeliveryDate>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Days' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Days') + ival_ = self.gds_validate_integer(ival_, node, 'Days') + self.Days = ival_ + self.Days_nsprefix_ = child_.prefix + elif nodeName_ == 'Message': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Message') + value_ = self.gds_validate_string(value_, node, 'Message') + self.Message = value_ + self.Message_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EffectiveAcceptanceDate') + value_ = self.gds_validate_string(value_, node, 'EffectiveAcceptanceDate') + self.EffectiveAcceptanceDate = value_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ScheduledDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate') + self.ScheduledDeliveryDate = value_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix +# end class PriorityMailResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
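parseString, as described here, builds the object tree from an in-memory XML string and, unless silence=True is passed, re-exports it to stdout. A minimal round-trip sketch, assuming the generated module is importable as karrio.schemas.usps.priority_mail_response and using made-up ZIP codes:

    import sys
    import karrio.schemas.usps.priority_mail_response as model

    xml = (
        "<PriorityMailResponse>"
        "<OriginZip>10001</OriginZip>"
        "<DestinationZip>94105</DestinationZip>"
        "<Days>2</Days>"
        "</PriorityMailResponse>"
    )
    response = model.parseString(xml, silence=True)        # build only, no export
    print(response.get_OriginZip(), response.get_Days())   # parsed as integers: 10001 2
    response.export(sys.stdout, 0, name_="PriorityMailResponse")  # re-serialize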
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PriorityMailResponse' + rootClass = PriorityMailResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from priority_mail_response import *\n\n') + sys.stdout.write('import priority_mail_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PriorityMailResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/pts_email_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/pts_email_request.py new file mode 100644 index 0000000000..283837d44f --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/pts_email_request.py @@ -0,0 +1,1497 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:05 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/pts_email_request.py') +# +# Command line arguments: +# ./schemas/PTSEmailRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/pts_email_request.py" ./schemas/PTSEmailRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
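The gds_parse_datetime and gds_format_datetime helpers on GeneratedsSuper above round-trip xsd:dateTime values, including the trailing Z or +hh:mm timezone designator. A small sketch, assuming the module is importable as karrio.schemas.usps_international.pts_email_request (the timestamp is arbitrary):

    import karrio.schemas.usps_international.pts_email_request as pts

    # classmethod: ISO-style string to a timezone-aware datetime
    dt = pts.PTSEmailRequest.gds_parse_datetime("2024-04-03T21:09:05Z")
    print(dt.tzinfo.utcoffset(dt))                         # 0:00:00, i.e. UTC

    # instance method: datetime back to its canonical string form
    print(pts.PTSEmailRequest().gds_format_datetime(dt))   # 2024-04-03T21:09:05Z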
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSEmailRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, TrackId=None, ClientIp=None, MpSuffix=None, MpDate=None, RequestType=None, FirstName=None, LastName=None, Email1=None, Email2=None, Email3=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.TrackId = TrackId + self.TrackId_nsprefix_ = None + self.ClientIp = ClientIp + self.ClientIp_nsprefix_ = None + self.MpSuffix = MpSuffix + self.MpSuffix_nsprefix_ = None + self.MpDate = MpDate + self.MpDate_nsprefix_ = None + if RequestType is None: + self.RequestType = [] + else: + self.RequestType = RequestType + self.RequestType_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.Email1 = Email1 + self.Email1_nsprefix_ = None + self.Email2 = Email2 + self.Email2_nsprefix_ = None + self.Email3 = Email3 + self.Email3_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSEmailRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSEmailRequest.subclass: + return PTSEmailRequest.subclass(*args_, **kwargs_) + else: + return PTSEmailRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackId(self): + return self.TrackId + def set_TrackId(self, TrackId): + self.TrackId = TrackId + def get_ClientIp(self): + return self.ClientIp + def set_ClientIp(self, ClientIp): + self.ClientIp = ClientIp + def get_MpSuffix(self): + return self.MpSuffix + def set_MpSuffix(self, MpSuffix): + self.MpSuffix = MpSuffix + def get_MpDate(self): + return self.MpDate + def set_MpDate(self, MpDate): + self.MpDate = MpDate + def get_RequestType(self): + return self.RequestType + def set_RequestType(self, RequestType): + self.RequestType = RequestType + def add_RequestType(self, value): + self.RequestType.append(value) + def insert_RequestType_at(self, index, value): + self.RequestType.insert(index, value) + def replace_RequestType_at(self, index, value): + self.RequestType[index] = value + def 
get_FirstName(self): + return self.FirstName + def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_Email1(self): + return self.Email1 + def set_Email1(self, Email1): + self.Email1 = Email1 + def get_Email2(self): + return self.Email2 + def set_Email2(self, Email2): + self.Email2 = Email2 + def get_Email3(self): + return self.Email3 + def set_Email3(self, Email3): + self.Email3 = Email3 + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.TrackId is not None or + self.ClientIp is not None or + self.MpSuffix is not None or + self.MpDate is not None or + self.RequestType or + self.FirstName is not None or + self.LastName is not None or + self.Email1 is not None or + self.Email2 is not None or + self.Email3 is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEmailRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSEmailRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSEmailRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSEmailRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSEmailRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSEmailRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEmailRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TrackId is not None: + namespaceprefix_ = self.TrackId_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackId_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTrackId>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackId), input_name='TrackId')), namespaceprefix_ , eol_)) + if self.ClientIp is not None: + namespaceprefix_ = self.ClientIp_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientIp_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientIp>%s%s' % 
(namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClientIp), input_name='ClientIp')), namespaceprefix_ , eol_)) + if self.MpSuffix is not None: + namespaceprefix_ = self.MpSuffix_nsprefix_ + ':' if (UseCapturedNS_ and self.MpSuffix_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMpSuffix>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MpSuffix, input_name='MpSuffix'), namespaceprefix_ , eol_)) + if self.MpDate is not None: + namespaceprefix_ = self.MpDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MpDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMpDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MpDate), input_name='MpDate')), namespaceprefix_ , eol_)) + for RequestType_ in self.RequestType: + namespaceprefix_ = self.RequestType_nsprefix_ + ':' if (UseCapturedNS_ and self.RequestType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRequestType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(RequestType_), input_name='RequestType')), namespaceprefix_ , eol_)) + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.Email1 is not None: + namespaceprefix_ = self.Email1_nsprefix_ + ':' if (UseCapturedNS_ and self.Email1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email1), input_name='Email1')), namespaceprefix_ , eol_)) + if self.Email2 is not None: + namespaceprefix_ = self.Email2_nsprefix_ + ':' if (UseCapturedNS_ and self.Email2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email2), input_name='Email2')), namespaceprefix_ , eol_)) + if self.Email3 is not None: + namespaceprefix_ = self.Email3_nsprefix_ + ':' if (UseCapturedNS_ and self.Email3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email3), input_name='Email3')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + 
already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackId': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackId') + value_ = self.gds_validate_string(value_, node, 'TrackId') + self.TrackId = value_ + self.TrackId_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientIp': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClientIp') + value_ = self.gds_validate_string(value_, node, 'ClientIp') + self.ClientIp = value_ + self.ClientIp_nsprefix_ = child_.prefix + elif nodeName_ == 'MpSuffix' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MpSuffix') + ival_ = self.gds_validate_integer(ival_, node, 'MpSuffix') + self.MpSuffix = ival_ + self.MpSuffix_nsprefix_ = child_.prefix + elif nodeName_ == 'MpDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MpDate') + value_ = self.gds_validate_string(value_, node, 'MpDate') + self.MpDate = value_ + self.MpDate_nsprefix_ = child_.prefix + elif nodeName_ == 'RequestType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RequestType') + value_ = self.gds_validate_string(value_, node, 'RequestType') + self.RequestType.append(value_) + self.RequestType_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'Email1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email1') + value_ = self.gds_validate_string(value_, node, 'Email1') + self.Email1 = value_ + self.Email1_nsprefix_ = child_.prefix + elif nodeName_ == 'Email2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email2') + value_ = self.gds_validate_string(value_, node, 'Email2') + self.Email2 = value_ + self.Email2_nsprefix_ = child_.prefix + elif nodeName_ == 'Email3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email3') + value_ = self.gds_validate_string(value_, node, 'Email3') + self.Email3 = value_ + self.Email3_nsprefix_ = child_.prefix +# end class PTSEmailRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEmailRequest' + rootClass = PTSEmailRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from pts_email_request import *\n\n') + sys.stdout.write('import pts_email_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSEmailRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/pts_emailresult.py b/modules/connectors/usps_international/karrio/schemas/usps_international/pts_emailresult.py new file mode 100644 index 0000000000..61cde9ccbf --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/pts_emailresult.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:05 2024 by generateDS.py version 2.43.3. 
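For reference, a minimal sketch of how the generated pts_email_request module above can be exercised once installed; the import path and the sample field values are assumptions for illustration and are not part of this patch.

# Hypothetical usage of the generated PTSEmailRequest module.
# The import path is an assumption; point it at wherever pts_email_request.py lives.
import karrio.schemas.usps_international.pts_email_request as pts_email_request

SAMPLE_XML = (
    '<PTSEmailRequest USERID="XXXXXXXX" PASSWORD="XXXXXXXX">'
    '<TrackId>EJ123456780US</TrackId>'
    '<RequestType>EN</RequestType>'
    '<Email1>shipper@example.com</Email1>'
    '</PTSEmailRequest>'
)

# parseString builds the object tree; silence=True skips the re-export to stdout.
request = pts_email_request.parseString(SAMPLE_XML, silence=True)
print(request.TrackId, request.Email1, request.USERID)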
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/pts_emailresult.py') +# +# Command line arguments: +# ./schemas/PTSEmailResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/pts_emailresult.py" ./schemas/PTSEmailResult.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSEMAILRESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSEMAILRESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSEMAILRESULT.subclass: + return PTSEMAILRESULT.subclass(*args_, **kwargs_) + else: + return PTSEMAILRESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEMAILRESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSEMAILRESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSEMAILRESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSEMAILRESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSEMAILRESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSEMAILRESULT'): + pass + 
def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSEMAILRESULT', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ResultText is not None: + namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sResultText>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_)) + if self.ReturnCode is not None: + namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ResultText': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ResultText') + value_ = self.gds_validate_string(value_, node, 'ResultText') + self.ResultText = value_ + self.ResultText_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode') + ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode') + self.ReturnCode = ival_ + self.ReturnCode_nsprefix_ = child_.prefix +# end class PTSEMAILRESULT + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSEMAILRESULT' + rootClass = PTSEMAILRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from pts_emailresult import *\n\n') + sys.stdout.write('import pts_emailresult as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSEMAILRESULT" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/ptspod_result.py b/modules/connectors/usps_international/karrio/schemas/usps_international/ptspod_result.py new file mode 100644 index 0000000000..78a204c6bc --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/ptspod_result.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:06 2024 by generateDS.py version 2.43.3. 
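Similarly, a minimal sketch of serializing the generated PTSEMAILRESULT class above with its export() method; the import path and the field values are assumptions for illustration and are not part of this patch.

# Hypothetical construction and export of a PTSEMAILRESULT instance.
import io
import karrio.schemas.usps_international.pts_emailresult as pts_emailresult

result = pts_emailresult.PTSEMAILRESULT(
    ResultText="Request processed.",  # placeholder text
    ReturnCode=0,
)

buffer = io.StringIO()
result.export(buffer, 0, name_='PTSEMAILRESULT')
print(buffer.getvalue())
# Roughly:
# <PTSEMAILRESULT>
#     <ResultText>Request processed.</ResultText>
#     <ReturnCode>0</ReturnCode>
# </PTSEMAILRESULT>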
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/ptspod_result.py') +# +# Command line arguments: +# ./schemas/PTSPODResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/ptspod_result.py" ./schemas/PTSPODResult.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
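The CurrentSubclassModule_ hook defined just above, together with getSubclassFromModule_(), lets the factory() methods of the generated classes (such as PTSPODRESULT further down in this patch) return hand-written subclasses. A minimal sketch, assuming the module is importable as karrio.schemas.usps.ptspod_result (path taken from the diffstat); the subclass name and behaviour are illustrative, not part of the patch:

    import sys

    import karrio.schemas.usps.ptspod_result as ptspod_result  # import path assumed


    class PTSPODRESULTSub(ptspod_result.PTSPODRESULT):
        """Illustrative subclass picked up by getSubclassFromModule_()."""

        def is_success(self):
            # ReturnCode is parsed as an integer by _buildChildren().
            return self.ReturnCode == 0


    # Point the generated module at this module; factory() then looks for a
    # "<ClassName>Sub" attribute here and builds it instead of PTSPODRESULT.
    ptspod_result.CurrentSubclassModule_ = sys.modules[__name__]

    obj = ptspod_result.PTSPODRESULT.factory(ResultText="OK", ReturnCode=0)
    assert isinstance(obj, PTSPODRESULTSub)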
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSPODRESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSPODRESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSPODRESULT.subclass: + return PTSPODRESULT.subclass(*args_, **kwargs_) + else: + return PTSPODRESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSPODRESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSPODRESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSPODRESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSPODRESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSPODRESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSPODRESULT'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSPODRESULT', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.ResultText is not None:
+            namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sResultText>%s</%sResultText>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_))
+        if self.ReturnCode is not None:
+            namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sReturnCode>%s</%sReturnCode>%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'ResultText':
+            value_ = child_.text
+            value_ = self.gds_parse_string(value_, node, 'ResultText')
+            value_ = self.gds_validate_string(value_, node, 'ResultText')
+            self.ResultText = value_
+            self.ResultText_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ReturnCode' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode')
+            ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode')
+            self.ReturnCode = ival_
+            self.ReturnCode_nsprefix_ = child_.prefix
+# end class PTSPODRESULT
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSPODRESULT' + rootClass = PTSPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptspod_result import *\n\n') + sys.stdout.write('import ptspod_result as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSPODRESULT" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/ptsrre_result.py b/modules/connectors/usps_international/karrio/schemas/usps_international/ptsrre_result.py new file mode 100644 index 0000000000..210682f686 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/ptsrre_result.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:06 2024 by generateDS.py version 2.43.3. 
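Both generated modules expose the same module-level parse helpers. A short usage sketch against the parseString() defined above for ptspod_result; the import path is taken from the diffstat and the element values are illustrative:

    from karrio.schemas.usps.ptspod_result import parseString

    # Per the docstring above, the string must not start with an XML
    # declaration that carries an encoding.
    XML = """<PTSPODRESULT>
        <ResultText>Proof of delivery record emailed.</ResultText>
        <ReturnCode>0</ReturnCode>
    </PTSPODRESULT>"""

    # silence=True skips re-exporting the tree to stdout; GdsCollector_
    # warnings go to stderr unless print_warnings=False.
    result = parseString(XML, silence=True, print_warnings=False)
    print(result.get_ResultText(), result.get_ReturnCode())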
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/ptsrre_result.py') +# +# Command line arguments: +# ./schemas/PTSRREResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/ptsrre_result.py" ./schemas/PTSRREResult.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
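As the comment above notes, both tables are keyed by generated class name (for example PTSPODRESULT in the previous module). Because export() splices the looked-up GenerateDSNamespaceDefs_ value directly into the element start tag, the value is normally a complete xmlns attribute string rather than a bare URI. A minimal sketch of such an override module; the prefix and URI are placeholders, not part of the USPS schemas:

    # File: generatedsnamespaces.py (optional override module)

    GenerateDSNamespaceDefs = {
        "PTSPODRESULT": 'xmlns:ns0="http://example.com/ptspod"',
    }

    GenerateDSNamespaceTypePrefixes = {
        "PTSPODRESULT": "ns0:",
    }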
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
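The gds_validate_simple_patterns() helper above receives a list of lists of regular expressions: every inner list must contain at least one pattern that matches the whole value (AND across the outer list, OR within each inner list). A standalone sketch, assuming the module is importable as karrio.schemas.usps_international.ptsrre_result (per the diff header); the ZIP-code patterns are illustrative, not taken from the schema:

    from karrio.schemas.usps_international.ptsrre_result import GeneratedsSuper

    checker = GeneratedsSuper()
    zip_patterns = [["^[0-9]{5}$", "^[0-9]{5}-[0-9]{4}$"]]  # 5 digits, or ZIP+4

    assert checker.gds_validate_simple_patterns(zip_patterns, "20260")
    assert checker.gds_validate_simple_patterns(zip_patterns, "20260-0004")
    assert not checker.gds_validate_simple_patterns(zip_patterns, "2026")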
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSRRERESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSRRERESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSRRERESULT.subclass: + return PTSRRERESULT.subclass(*args_, **kwargs_) + else: + return PTSRRERESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSRRERESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSRRERESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSRRERESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSRRERESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSRRERESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSRRERESULT'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSRRERESULT', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ResultText is not None: + namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sResultText>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_)) + if self.ReturnCode is not None: + namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ResultText': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ResultText') + value_ = self.gds_validate_string(value_, node, 'ResultText') + self.ResultText = value_ + self.ResultText_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode') + ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode') + self.ReturnCode = ival_ + self.ReturnCode_nsprefix_ = child_.prefix +# end class PTSRRERESULT + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSRRERESULT' + rootClass = PTSRRERESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptsrre_result import *\n\n') + sys.stdout.write('import ptsrre_result as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSRRERESULT" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_request.py new file mode 100644 index 0000000000..7f937ae491 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_request.py @@ -0,0 +1,1522 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:06 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/ptstpod_request.py') +# +# Command line arguments: +# ./schemas/PTSTPodRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/ptstpod_request.py" ./schemas/PTSTPodRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSTPodRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, TrackId=None, ClientIp=None, MpSuffix=None, MpDate=None, RequestType=None, FirstName=None, LastName=None, Email1=None, Email2=None, Email3=None, TableCode=None, CustRegID=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.TrackId = TrackId + self.TrackId_nsprefix_ = None + self.ClientIp = ClientIp + self.ClientIp_nsprefix_ = None + self.MpSuffix = MpSuffix + self.MpSuffix_nsprefix_ = None + self.MpDate = MpDate + self.MpDate_nsprefix_ = None + self.RequestType = RequestType + self.RequestType_nsprefix_ = None + self.FirstName = FirstName + self.FirstName_nsprefix_ = None + self.LastName = LastName + self.LastName_nsprefix_ = None + self.Email1 = Email1 + self.Email1_nsprefix_ = None + self.Email2 = Email2 + self.Email2_nsprefix_ = None + self.Email3 = Email3 + self.Email3_nsprefix_ = None + self.TableCode = TableCode + self.TableCode_nsprefix_ = None + self.CustRegID = CustRegID + self.CustRegID_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSTPodRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSTPodRequest.subclass: + return PTSTPodRequest.subclass(*args_, **kwargs_) + else: + return PTSTPodRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackId(self): + return self.TrackId + def set_TrackId(self, TrackId): + self.TrackId = TrackId + def get_ClientIp(self): + return self.ClientIp + def set_ClientIp(self, ClientIp): + self.ClientIp = ClientIp + def get_MpSuffix(self): + return self.MpSuffix + def set_MpSuffix(self, MpSuffix): + self.MpSuffix = MpSuffix + def get_MpDate(self): + return self.MpDate + def set_MpDate(self, MpDate): + self.MpDate = MpDate + def get_RequestType(self): + return self.RequestType + def set_RequestType(self, RequestType): + self.RequestType = RequestType + def get_FirstName(self): + return self.FirstName + def set_FirstName(self, FirstName): + self.FirstName = FirstName + def get_LastName(self): + return 
self.LastName + def set_LastName(self, LastName): + self.LastName = LastName + def get_Email1(self): + return self.Email1 + def set_Email1(self, Email1): + self.Email1 = Email1 + def get_Email2(self): + return self.Email2 + def set_Email2(self, Email2): + self.Email2 = Email2 + def get_Email3(self): + return self.Email3 + def set_Email3(self, Email3): + self.Email3 = Email3 + def get_TableCode(self): + return self.TableCode + def set_TableCode(self, TableCode): + self.TableCode = TableCode + def get_CustRegID(self): + return self.CustRegID + def set_CustRegID(self, CustRegID): + self.CustRegID = CustRegID + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.TrackId is not None or + self.ClientIp is not None or + self.MpSuffix is not None or + self.MpDate is not None or + self.RequestType is not None or + self.FirstName is not None or + self.LastName is not None or + self.Email1 is not None or + self.Email2 is not None or + self.Email3 is not None or + self.TableCode is not None or + self.CustRegID is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPodRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSTPodRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSTPodRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSTPodRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSTPodRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSTPodRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPodRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TrackId is not None: + namespaceprefix_ = self.TrackId_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackId_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTrackId>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackId), input_name='TrackId')), namespaceprefix_ , eol_)) + if self.ClientIp is not None: + namespaceprefix_ = 
self.ClientIp_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientIp_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientIp>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClientIp), input_name='ClientIp')), namespaceprefix_ , eol_)) + if self.MpSuffix is not None: + namespaceprefix_ = self.MpSuffix_nsprefix_ + ':' if (UseCapturedNS_ and self.MpSuffix_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMpSuffix>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MpSuffix, input_name='MpSuffix'), namespaceprefix_ , eol_)) + if self.MpDate is not None: + namespaceprefix_ = self.MpDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MpDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMpDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MpDate), input_name='MpDate')), namespaceprefix_ , eol_)) + if self.RequestType is not None: + namespaceprefix_ = self.RequestType_nsprefix_ + ':' if (UseCapturedNS_ and self.RequestType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRequestType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RequestType), input_name='RequestType')), namespaceprefix_ , eol_)) + if self.FirstName is not None: + namespaceprefix_ = self.FirstName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstName), input_name='FirstName')), namespaceprefix_ , eol_)) + if self.LastName is not None: + namespaceprefix_ = self.LastName_nsprefix_ + ':' if (UseCapturedNS_ and self.LastName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLastName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LastName), input_name='LastName')), namespaceprefix_ , eol_)) + if self.Email1 is not None: + namespaceprefix_ = self.Email1_nsprefix_ + ':' if (UseCapturedNS_ and self.Email1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email1), input_name='Email1')), namespaceprefix_ , eol_)) + if self.Email2 is not None: + namespaceprefix_ = self.Email2_nsprefix_ + ':' if (UseCapturedNS_ and self.Email2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email2), input_name='Email2')), namespaceprefix_ , eol_)) + if self.Email3 is not None: + namespaceprefix_ = self.Email3_nsprefix_ + ':' if (UseCapturedNS_ and self.Email3_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail3>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email3), input_name='Email3')), namespaceprefix_ , eol_)) + if self.TableCode is not None: + namespaceprefix_ = self.TableCode_nsprefix_ + ':' if (UseCapturedNS_ and self.TableCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTableCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TableCode), input_name='TableCode')), namespaceprefix_ , eol_)) + if self.CustRegID is not None: + namespaceprefix_ = self.CustRegID_nsprefix_ + ':' if 
(UseCapturedNS_ and self.CustRegID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustRegID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CustRegID, input_name='CustRegID'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackId': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackId') + value_ = self.gds_validate_string(value_, node, 'TrackId') + self.TrackId = value_ + self.TrackId_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientIp': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClientIp') + value_ = self.gds_validate_string(value_, node, 'ClientIp') + self.ClientIp = value_ + self.ClientIp_nsprefix_ = child_.prefix + elif nodeName_ == 'MpSuffix' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MpSuffix') + ival_ = self.gds_validate_integer(ival_, node, 'MpSuffix') + self.MpSuffix = ival_ + self.MpSuffix_nsprefix_ = child_.prefix + elif nodeName_ == 'MpDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MpDate') + value_ = self.gds_validate_string(value_, node, 'MpDate') + self.MpDate = value_ + self.MpDate_nsprefix_ = child_.prefix + elif nodeName_ == 'RequestType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RequestType') + value_ = self.gds_validate_string(value_, node, 'RequestType') + self.RequestType = value_ + self.RequestType_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstName') + value_ = self.gds_validate_string(value_, node, 'FirstName') + self.FirstName = value_ + self.FirstName_nsprefix_ = child_.prefix + elif nodeName_ == 'LastName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'LastName') + value_ = self.gds_validate_string(value_, node, 'LastName') + self.LastName = value_ + self.LastName_nsprefix_ = child_.prefix + elif nodeName_ == 'Email1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email1') + value_ = self.gds_validate_string(value_, node, 'Email1') + self.Email1 = value_ + self.Email1_nsprefix_ = child_.prefix + elif nodeName_ == 'Email2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email2') + value_ = self.gds_validate_string(value_, node, 'Email2') + self.Email2 = value_ + self.Email2_nsprefix_ = child_.prefix + elif nodeName_ == 'Email3': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Email3') + value_ = self.gds_validate_string(value_, node, 'Email3') + self.Email3 = value_ + 
self.Email3_nsprefix_ = child_.prefix + elif nodeName_ == 'TableCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TableCode') + value_ = self.gds_validate_string(value_, node, 'TableCode') + self.TableCode = value_ + self.TableCode_nsprefix_ = child_.prefix + elif nodeName_ == 'CustRegID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'CustRegID') + ival_ = self.gds_validate_integer(ival_, node, 'CustRegID') + self.CustRegID = ival_ + self.CustRegID_nsprefix_ = child_.prefix +# end class PTSTPodRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPodRequest' + rootClass = PTSTPodRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptstpod_request import *\n\n') + sys.stdout.write('import ptstpod_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSTPodRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_result.py b/modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_result.py new file mode 100644 index 0000000000..f3ad09485e --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/ptstpod_result.py @@ -0,0 +1,1328 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:06 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/ptstpod_result.py') +# +# Command line arguments: +# ./schemas/PTSTPODResult.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/ptstpod_result.py" ./schemas/PTSTPODResult.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class PTSTPODRESULT(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ResultText=None, ReturnCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ResultText = ResultText + self.ResultText_nsprefix_ = None + self.ReturnCode = ReturnCode + self.ReturnCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PTSTPODRESULT) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PTSTPODRESULT.subclass: + return PTSTPODRESULT.subclass(*args_, **kwargs_) + else: + return PTSTPODRESULT(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ResultText(self): + return self.ResultText + def set_ResultText(self, ResultText): + self.ResultText = ResultText + def get_ReturnCode(self): + return self.ReturnCode + def set_ReturnCode(self, ReturnCode): + self.ReturnCode = ReturnCode + def has__content(self): + if ( + self.ResultText is not None or + self.ReturnCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPODRESULT', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PTSTPODRESULT') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PTSTPODRESULT': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PTSTPODRESULT') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PTSTPODRESULT', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PTSTPODRESULT'): + pass + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PTSTPODRESULT', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ResultText is not None: + namespaceprefix_ = self.ResultText_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultText_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sResultText>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ResultText), input_name='ResultText')), namespaceprefix_ , eol_)) + if self.ReturnCode is not None: + namespaceprefix_ = self.ReturnCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ReturnCode, input_name='ReturnCode'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ResultText': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ResultText') + value_ = self.gds_validate_string(value_, node, 'ResultText') + self.ResultText = value_ + self.ResultText_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ReturnCode') + ival_ = self.gds_validate_integer(ival_, node, 'ReturnCode') + self.ReturnCode = ival_ + self.ReturnCode_nsprefix_ = child_.prefix +# end class PTSTPODRESULT + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PTSTPODRESULT' + rootClass = PTSTPODRESULT + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from ptstpod_result import *\n\n') + sys.stdout.write('import ptstpod_result as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "PTSTPODRESULT" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_request.py new file mode 100644 index 0000000000..70042bf709 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_request.py @@ -0,0 +1,2216 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:07 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/rate_v4_request.py') +# +# Command line arguments: +# ./schemas/RateV4Request.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/rate_v4_request.py" ./schemas/RateV4Request.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g</%s>' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s</%s>' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class RateV4Request(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Revision=None, Package=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RateV4Request) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RateV4Request.subclass: + return RateV4Request.subclass(*args_, **kwargs_) + else: + return RateV4Request(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Revision is not None or + self.Package + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Request', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RateV4Request') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'RateV4Request': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + 
already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RateV4Request') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RateV4Request', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RateV4Request'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Request', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s</%sRevision>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Package.append(obj_) + obj_.original_tagname_ = 'Package' +# end class RateV4Request + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Service=None, FirstClassMailType=None, ZipOrigination=None, ZipDestination=None, Pounds=None, Ounces=None, Container=None, Size=None, Width=None, Length=None, Height=None, 
Girth=None, Value=None, AmountToCollect=None, SpecialServices=None, Content=None, GroundOnly=None, SortBy=None, Machinable=None, ReturnLocations=None, ReturnServiceInfo=None, DropOffTime=None, ShipDate=None, ReturnDimensionalWeight=None, TrackingRetentionPeriod=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Service = Service + self.Service_nsprefix_ = None + self.FirstClassMailType = FirstClassMailType + self.FirstClassMailType_nsprefix_ = None + self.ZipOrigination = ZipOrigination + self.ZipOrigination_nsprefix_ = None + self.ZipDestination = ZipDestination + self.ZipDestination_nsprefix_ = None + self.Pounds = Pounds + self.validate_PoundsType(self.Pounds) + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.validate_OuncesType(self.Ounces) + self.Ounces_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + self.Size = Size + self.Size_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Value = Value + self.Value_nsprefix_ = None + self.AmountToCollect = AmountToCollect + self.AmountToCollect_nsprefix_ = None + self.SpecialServices = SpecialServices + self.SpecialServices_nsprefix_ = None + self.Content = Content + self.Content_nsprefix_ = None + self.GroundOnly = GroundOnly + self.GroundOnly_nsprefix_ = None + self.SortBy = SortBy + self.SortBy_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.ReturnLocations = ReturnLocations + self.ReturnLocations_nsprefix_ = None + self.ReturnServiceInfo = ReturnServiceInfo + self.ReturnServiceInfo_nsprefix_ = None + self.DropOffTime = DropOffTime + self.DropOffTime_nsprefix_ = None + self.ShipDate = ShipDate + self.ShipDate_nsprefix_ = None + self.ReturnDimensionalWeight = ReturnDimensionalWeight + self.ReturnDimensionalWeight_nsprefix_ = None + self.TrackingRetentionPeriod = TrackingRetentionPeriod + self.TrackingRetentionPeriod_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def get_FirstClassMailType(self): + return self.FirstClassMailType + def set_FirstClassMailType(self, FirstClassMailType): + self.FirstClassMailType = FirstClassMailType + def get_ZipOrigination(self): + return self.ZipOrigination + def set_ZipOrigination(self, ZipOrigination): + self.ZipOrigination = ZipOrigination + def get_ZipDestination(self): + return self.ZipDestination + def set_ZipDestination(self, ZipDestination): + self.ZipDestination = ZipDestination + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, 
Ounces): + self.Ounces = Ounces + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Size(self): + return self.Size + def set_Size(self, Size): + self.Size = Size + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Value(self): + return self.Value + def set_Value(self, Value): + self.Value = Value + def get_AmountToCollect(self): + return self.AmountToCollect + def set_AmountToCollect(self, AmountToCollect): + self.AmountToCollect = AmountToCollect + def get_SpecialServices(self): + return self.SpecialServices + def set_SpecialServices(self, SpecialServices): + self.SpecialServices = SpecialServices + def get_Content(self): + return self.Content + def set_Content(self, Content): + self.Content = Content + def get_GroundOnly(self): + return self.GroundOnly + def set_GroundOnly(self, GroundOnly): + self.GroundOnly = GroundOnly + def get_SortBy(self): + return self.SortBy + def set_SortBy(self, SortBy): + self.SortBy = SortBy + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_ReturnLocations(self): + return self.ReturnLocations + def set_ReturnLocations(self, ReturnLocations): + self.ReturnLocations = ReturnLocations + def get_ReturnServiceInfo(self): + return self.ReturnServiceInfo + def set_ReturnServiceInfo(self, ReturnServiceInfo): + self.ReturnServiceInfo = ReturnServiceInfo + def get_DropOffTime(self): + return self.DropOffTime + def set_DropOffTime(self, DropOffTime): + self.DropOffTime = DropOffTime + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_ReturnDimensionalWeight(self): + return self.ReturnDimensionalWeight + def set_ReturnDimensionalWeight(self, ReturnDimensionalWeight): + self.ReturnDimensionalWeight = ReturnDimensionalWeight + def get_TrackingRetentionPeriod(self): + return self.TrackingRetentionPeriod + def set_TrackingRetentionPeriod(self, TrackingRetentionPeriod): + self.TrackingRetentionPeriod = TrackingRetentionPeriod + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def validate_PoundsType(self, value): + result = True + # Validate type PoundsType, a restriction on xs:integer. 
+ if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False + if value < 0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + if value > 70: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on PoundsType' % {"value": value, "lineno": lineno} ) + result = False + return result + def validate_OuncesType(self, value): + result = True + # Validate type OuncesType, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, decimal_.Decimal): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (decimal_.Decimal)' % {"value": value, "lineno": lineno, }) + return False + if value < 0.0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on OuncesType' % {"value": value, "lineno": lineno} ) + result = False + if value > 1120.0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on OuncesType' % {"value": value, "lineno": lineno} ) + result = False + return result + def has__content(self): + if ( + self.Service is not None or + self.FirstClassMailType is not None or + self.ZipOrigination is not None or + self.ZipDestination is not None or + self.Pounds is not None or + self.Ounces is not None or + self.Container is not None or + self.Size is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Value is not None or + self.AmountToCollect is not None or + self.SpecialServices is not None or + self.Content is not None or + self.GroundOnly is not None or + self.SortBy is not None or + self.Machinable is not None or + self.ReturnLocations is not None or + self.ReturnServiceInfo is not None or + self.DropOffTime is not None or + self.ShipDate is not None or + self.ReturnDimensionalWeight is not None or + self.TrackingRetentionPeriod is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='PackageType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Service is not None: + namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sService>%s</%sService>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Service), input_name='Service')), namespaceprefix_ , eol_)) + if self.FirstClassMailType is not None: + namespaceprefix_ = self.FirstClassMailType_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstClassMailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstClassMailType>%s</%sFirstClassMailType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstClassMailType), input_name='FirstClassMailType')), namespaceprefix_ , eol_)) + if self.ZipOrigination is not None: + namespaceprefix_ = self.ZipOrigination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipOrigination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipOrigination>%s</%sZipOrigination>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipOrigination), input_name='ZipOrigination')), namespaceprefix_ , eol_)) + if self.ZipDestination is not None: + namespaceprefix_ = self.ZipDestination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipDestination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipDestination>%s</%sZipDestination>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipDestination), input_name='ZipDestination')), namespaceprefix_ , eol_)) + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s</%sPounds>%s' % (namespaceprefix_ , self.gds_format_integer(self.Pounds, input_name='Pounds'), namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s</%sOunces>%s' % (namespaceprefix_ , self.gds_format_decimal(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s</%sContainer>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Size is not None: + namespaceprefix_ = self.Size_nsprefix_ + ':' if (UseCapturedNS_ and self.Size_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSize>%s</%sSize>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Size), 
input_name='Size')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s</%sWidth>%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s</%sLength>%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s</%sHeight>%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s</%sGirth>%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Value is not None: + namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValue>%s</%sValue>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Value), input_name='Value')), namespaceprefix_ , eol_)) + if self.AmountToCollect is not None: + namespaceprefix_ = self.AmountToCollect_nsprefix_ + ':' if (UseCapturedNS_ and self.AmountToCollect_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAmountToCollect>%s</%sAmountToCollect>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AmountToCollect), input_name='AmountToCollect')), namespaceprefix_ , eol_)) + if self.SpecialServices is not None: + namespaceprefix_ = self.SpecialServices_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialServices_nsprefix_) else '' + self.SpecialServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpecialServices', pretty_print=pretty_print) + if self.Content is not None: + namespaceprefix_ = self.Content_nsprefix_ + ':' if (UseCapturedNS_ and self.Content_nsprefix_) else '' + self.Content.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Content', pretty_print=pretty_print) + if self.GroundOnly is not None: + namespaceprefix_ = self.GroundOnly_nsprefix_ + ':' if (UseCapturedNS_ and self.GroundOnly_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGroundOnly>%s</%sGroundOnly>%s' % (namespaceprefix_ , self.gds_format_boolean(self.GroundOnly, input_name='GroundOnly'), namespaceprefix_ , eol_)) + if self.SortBy is not None: + namespaceprefix_ = self.SortBy_nsprefix_ + ':' if (UseCapturedNS_ and self.SortBy_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSortBy>%s</%sSortBy>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SortBy), input_name='SortBy')), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s</%sMachinable>%s' % (namespaceprefix_ , self.gds_format_boolean(self.Machinable, 
input_name='Machinable'), namespaceprefix_ , eol_)) + if self.ReturnLocations is not None: + namespaceprefix_ = self.ReturnLocations_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnLocations_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnLocations>%s</%sReturnLocations>%s' % (namespaceprefix_ , self.gds_format_boolean(self.ReturnLocations, input_name='ReturnLocations'), namespaceprefix_ , eol_)) + if self.ReturnServiceInfo is not None: + namespaceprefix_ = self.ReturnServiceInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnServiceInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnServiceInfo>%s</%sReturnServiceInfo>%s' % (namespaceprefix_ , self.gds_format_boolean(self.ReturnServiceInfo, input_name='ReturnServiceInfo'), namespaceprefix_ , eol_)) + if self.DropOffTime is not None: + namespaceprefix_ = self.DropOffTime_nsprefix_ + ':' if (UseCapturedNS_ and self.DropOffTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDropOffTime>%s</%sDropOffTime>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DropOffTime), input_name='DropOffTime')), namespaceprefix_ , eol_)) + if self.ShipDate is not None: + namespaceprefix_ = self.ShipDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipDate_nsprefix_) else '' + self.ShipDate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShipDate', pretty_print=pretty_print) + if self.ReturnDimensionalWeight is not None: + namespaceprefix_ = self.ReturnDimensionalWeight_nsprefix_ + ':' if (UseCapturedNS_ and self.ReturnDimensionalWeight_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReturnDimensionalWeight>%s</%sReturnDimensionalWeight>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReturnDimensionalWeight), input_name='ReturnDimensionalWeight')), namespaceprefix_ , eol_)) + if self.TrackingRetentionPeriod is not None: + namespaceprefix_ = self.TrackingRetentionPeriod_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackingRetentionPeriod_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTrackingRetentionPeriod>%s</%sTrackingRetentionPeriod>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackingRetentionPeriod), input_name='TrackingRetentionPeriod')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Service': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Service') + value_ = self.gds_validate_string(value_, node, 'Service') + self.Service = value_ + self.Service_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstClassMailType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirstClassMailType') + value_ = self.gds_validate_string(value_, node, 'FirstClassMailType') + self.FirstClassMailType = value_ + 
self.FirstClassMailType_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipOrigination': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZipOrigination') + value_ = self.gds_validate_string(value_, node, 'ZipOrigination') + self.ZipOrigination = value_ + self.ZipOrigination_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipDestination': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ZipDestination') + value_ = self.gds_validate_string(value_, node, 'ZipDestination') + self.ZipDestination = value_ + self.ZipDestination_nsprefix_ = child_.prefix + elif nodeName_ == 'Pounds' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Pounds') + ival_ = self.gds_validate_integer(ival_, node, 'Pounds') + self.Pounds = ival_ + self.Pounds_nsprefix_ = child_.prefix + # validate type PoundsType + self.validate_PoundsType(self.Pounds) + elif nodeName_ == 'Ounces' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Ounces') + fval_ = self.gds_validate_decimal(fval_, node, 'Ounces') + self.Ounces = fval_ + self.Ounces_nsprefix_ = child_.prefix + # validate type OuncesType + self.validate_OuncesType(self.Ounces) + elif nodeName_ == 'Container': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Container') + value_ = self.gds_validate_string(value_, node, 'Container') + self.Container = value_ + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Size': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Size') + value_ = self.gds_validate_string(value_, node, 'Size') + self.Size = value_ + self.Size_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + self.Width = fval_ + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + self.Length = fval_ + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + self.Height = fval_ + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + self.Girth = fval_ + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Value': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Value') + value_ = self.gds_validate_string(value_, node, 'Value') + self.Value = value_ + self.Value_nsprefix_ = child_.prefix + elif nodeName_ == 'AmountToCollect': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AmountToCollect') + value_ = self.gds_validate_string(value_, node, 'AmountToCollect') + self.AmountToCollect = value_ + self.AmountToCollect_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialServices': + obj_ = SpecialServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SpecialServices = obj_ + obj_.original_tagname_ = 'SpecialServices' + elif nodeName_ == 'Content': + obj_ = ContentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Content = obj_ + 
obj_.original_tagname_ = 'Content' + elif nodeName_ == 'GroundOnly': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'GroundOnly') + ival_ = self.gds_validate_boolean(ival_, node, 'GroundOnly') + self.GroundOnly = ival_ + self.GroundOnly_nsprefix_ = child_.prefix + elif nodeName_ == 'SortBy': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SortBy') + value_ = self.gds_validate_string(value_, node, 'SortBy') + self.SortBy = value_ + self.SortBy_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Machinable') + ival_ = self.gds_validate_boolean(ival_, node, 'Machinable') + self.Machinable = ival_ + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnLocations': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ReturnLocations') + ival_ = self.gds_validate_boolean(ival_, node, 'ReturnLocations') + self.ReturnLocations = ival_ + self.ReturnLocations_nsprefix_ = child_.prefix + elif nodeName_ == 'ReturnServiceInfo': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ReturnServiceInfo') + ival_ = self.gds_validate_boolean(ival_, node, 'ReturnServiceInfo') + self.ReturnServiceInfo = ival_ + self.ReturnServiceInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'DropOffTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DropOffTime') + value_ = self.gds_validate_string(value_, node, 'DropOffTime') + self.DropOffTime = value_ + self.DropOffTime_nsprefix_ = child_.prefix + elif nodeName_ == 'ShipDate': + obj_ = ShipDateType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.ShipDate = obj_ + obj_.original_tagname_ = 'ShipDate' + elif nodeName_ == 'ReturnDimensionalWeight': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReturnDimensionalWeight') + value_ = self.gds_validate_string(value_, node, 'ReturnDimensionalWeight') + self.ReturnDimensionalWeight = value_ + self.ReturnDimensionalWeight_nsprefix_ = child_.prefix + elif nodeName_ == 'TrackingRetentionPeriod': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TrackingRetentionPeriod') + value_ = self.gds_validate_string(value_, node, 'TrackingRetentionPeriod') + self.TrackingRetentionPeriod = value_ + self.TrackingRetentionPeriod_nsprefix_ = child_.prefix +# end class PackageType + + +class SpecialServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SpecialService=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if SpecialService is None: + self.SpecialService = [] + else: + self.SpecialService = SpecialService + self.SpecialService_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecialServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SpecialServicesType.subclass: + return SpecialServicesType.subclass(*args_, **kwargs_) + else: + return SpecialServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SpecialService(self): + return self.SpecialService 
+ def set_SpecialService(self, SpecialService): + self.SpecialService = SpecialService + def add_SpecialService(self, value): + self.SpecialService.append(value) + def insert_SpecialService_at(self, index, value): + self.SpecialService.insert(index, value) + def replace_SpecialService_at(self, index, value): + self.SpecialService[index] = value + def has__content(self): + if ( + self.SpecialService + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecialServicesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SpecialServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecialServicesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecialServicesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecialServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for SpecialService_ in self.SpecialService: + namespaceprefix_ = self.SpecialService_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSpecialService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(SpecialService_), input_name='SpecialService')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SpecialService': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SpecialService') + value_ = self.gds_validate_string(value_, node, 'SpecialService') + self.SpecialService.append(value_) + self.SpecialService_nsprefix_ = child_.prefix +# end class SpecialServicesType + + +class ContentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ContentType_member=None, ContentDescription=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + 
self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ContentType = ContentType_member + self.ContentType_nsprefix_ = None + self.ContentDescription = ContentDescription + self.ContentDescription_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ContentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ContentType.subclass: + return ContentType.subclass(*args_, **kwargs_) + else: + return ContentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ContentType(self): + return self.ContentType + def set_ContentType(self, ContentType): + self.ContentType = ContentType + def get_ContentDescription(self): + return self.ContentDescription + def set_ContentDescription(self, ContentDescription): + self.ContentDescription = ContentDescription + def has__content(self): + if ( + self.ContentType is not None or + self.ContentDescription is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ContentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ContentType is not None: + namespaceprefix_ = self.ContentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentType), input_name='ContentType')), namespaceprefix_ , eol_)) + if self.ContentDescription is not None: + namespaceprefix_ = self.ContentDescription_nsprefix_ + ':' if (UseCapturedNS_ and self.ContentDescription_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContentDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContentDescription), input_name='ContentDescription')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + 
self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ContentType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentType') + value_ = self.gds_validate_string(value_, node, 'ContentType') + self.ContentType = value_ + self.ContentType_nsprefix_ = child_.prefix + elif nodeName_ == 'ContentDescription': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ContentDescription') + value_ = self.gds_validate_string(value_, node, 'ContentDescription') + self.ContentDescription = value_ + self.ContentDescription_nsprefix_ = child_.prefix +# end class ContentType + + +class ShipDateType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Option=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Option = _cast(None, Option) + self.Option_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShipDateType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShipDateType.subclass: + return ShipDateType.subclass(*args_, **kwargs_) + else: + return ShipDateType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipDateType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShipDateType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShipDateType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShipDateType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShipDateType'): + if self.Option is not None and 'Option' not in already_processed: + already_processed.add('Option') + 
outfile.write(' Option=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Option), input_name='Option')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipDateType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Option', node) + if value is not None and 'Option' not in already_processed: + already_processed.add('Option') + self.Option = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class ShipDateType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + 
rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Request' + rootClass = RateV4Request + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from rate_v4_request import *\n\n') + sys.stdout.write('import rate_v4_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
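For reference, a minimal usage sketch of the parse helpers defined above (parse / parseString / parseLiteral) for this generated request module. The import path, the sample USPS payload, and the attribute access below are illustrative assumptions, not part of the patch:

    import sys

    # Assumed import path for the generated module; adjust to the actual package layout.
    from karrio.schemas.usps_international.rate_v4_request import parseString

    SAMPLE = (
        '<RateV4Request USERID="XXXXXXXX">'
        '<Revision>2</Revision>'
        '<Package ID="0">'
        '<Service>PRIORITY</Service>'
        '<ZipOrigination>44106</ZipOrigination>'
        '<ZipDestination>20770</ZipDestination>'
        '<Pounds>1</Pounds>'
        '<Ounces>8</Ounces>'
        '<Container>VARIABLE</Container>'
        '<Machinable>true</Machinable>'
        '<ShipDate Option="EMSH">2024-04-05</ShipDate>'
        '</Package>'
        '</RateV4Request>'
    )

    request = parseString(SAMPLE, silence=True)    # builds and returns the RateV4Request object
    for package in request.Package:                # Package list populated by _buildChildren
        print(package.Machinable, package.SortBy)  # booleans/strings parsed per the schema
        if package.ShipDate is not None:           # ShipDateType carries an Option attribute
            print(package.ShipDate.get_Option(), package.ShipDate.get_valueOf_())

    # Round-trip: write the object tree back out as XML.
    request.export(sys.stdout, 0, name_='RateV4Request')

This is a sketch only; field names beyond those visible in the generated _buildChildren methods should be checked against the RateV4Request.xsd shipped alongside the connector.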
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ContentType", + "PackageType", + "RateV4Request", + "ShipDateType", + "SpecialServicesType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_response.py new file mode 100644 index 0000000000..3602900f87 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/rate_v4_response.py @@ -0,0 +1,2719 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:07 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/rate_v4_response.py') +# +# Command line arguments: +# ./schemas/RateV4Response.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/rate_v4_response.py" ./schemas/RateV4Response.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
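As an aside, the generatedscollector hook described a few lines above can be satisfied by a small drop-in module. The sketch below mirrors the default collector's interface and additionally forwards each warning to the standard logging module; the module itself and the logger name are assumptions for illustration, not part of the patch:

    # generatedscollector.py -- optional drop-in; when this module is importable,
    # the generated code uses this class instead of its built-in GdsCollector_.
    import logging

    log = logging.getLogger("usps.schemas")


    class GdsCollector(object):
        """Same interface as the default collector, with messages mirrored to logging."""

        def __init__(self, messages=None):
            self.messages = list(messages) if messages is not None else []

        def add_message(self, msg):
            self.messages.append(msg)
            log.warning(msg)  # forward each parse warning as it is collected

        def get_messages(self):
            return self.messages

        def clear_messages(self):
            self.messages = []

        def print_messages(self):
            for msg in self.messages:
                print("Warning: {}".format(msg))

        def write_messages(self, outstream):
            for msg in self.messages:
                outstream.write("Warning: {}\n".format(msg))

The same override pattern applies to the generatedsnamespaces and generatedssuper modules referenced in the preamble above, should a project need to customize namespace prefixes or the generated superclass behavior.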
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class RateV4Response(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Package=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if Package is None: + self.Package = [] + else: + self.Package = Package + self.Package_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RateV4Response) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RateV4Response.subclass: + return RateV4Response.subclass(*args_, **kwargs_) + else: + return RateV4Response(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Package(self): + return self.Package + def set_Package(self, Package): + self.Package = Package + def add_Package(self, value): + self.Package.append(value) + def insert_Package_at(self, index, value): + self.Package.insert(index, value) + def replace_Package_at(self, index, value): + self.Package[index] = value + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.Package or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Response', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RateV4Response') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'RateV4Response': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='RateV4Response') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RateV4Response'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RateV4Response', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Package_ in self.Package: + namespaceprefix_ = self.Package_nsprefix_ + ':' if (UseCapturedNS_ and self.Package_nsprefix_) else '' + Package_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Package', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Package': + obj_ = PackageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Package', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Package'): + self.add_Package(obj_.value) + elif hasattr(self, 'set_Package'): + self.set_Package(obj_.value) + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class RateV4Response + + +class PackageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Error=None, ZipOrigination=None, ZipDestination=None, Pounds=None, Ounces=None, FirstClassMailType=None, Container=None, Size=None, Width=None, Length=None, Height=None, Girth=None, Machinable=None, Zone=None, Postage=None, Restriction=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.Error = Error + self.Error_nsprefix_ = None + self.ZipOrigination = ZipOrigination + self.ZipOrigination_nsprefix_ = None + self.ZipDestination = ZipDestination + self.ZipDestination_nsprefix_ = None + self.Pounds = Pounds + self.Pounds_nsprefix_ = None + self.Ounces = Ounces + self.Ounces_nsprefix_ = None + self.FirstClassMailType = FirstClassMailType + self.FirstClassMailType_nsprefix_ = None + self.Container = Container + self.Container_nsprefix_ = None + 
self.Size = Size + self.Size_nsprefix_ = None + self.Width = Width + self.Width_nsprefix_ = None + self.Length = Length + self.Length_nsprefix_ = None + self.Height = Height + self.Height_nsprefix_ = None + self.Girth = Girth + self.Girth_nsprefix_ = None + self.Machinable = Machinable + self.Machinable_nsprefix_ = None + self.Zone = Zone + self.Zone_nsprefix_ = None + if Postage is None: + self.Postage = [] + else: + self.Postage = Postage + self.Postage_nsprefix_ = None + self.Restriction = Restriction + self.Restriction_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageType.subclass: + return PackageType.subclass(*args_, **kwargs_) + else: + return PackageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Error(self): + return self.Error + def set_Error(self, Error): + self.Error = Error + def get_ZipOrigination(self): + return self.ZipOrigination + def set_ZipOrigination(self, ZipOrigination): + self.ZipOrigination = ZipOrigination + def get_ZipDestination(self): + return self.ZipDestination + def set_ZipDestination(self, ZipDestination): + self.ZipDestination = ZipDestination + def get_Pounds(self): + return self.Pounds + def set_Pounds(self, Pounds): + self.Pounds = Pounds + def get_Ounces(self): + return self.Ounces + def set_Ounces(self, Ounces): + self.Ounces = Ounces + def get_FirstClassMailType(self): + return self.FirstClassMailType + def set_FirstClassMailType(self, FirstClassMailType): + self.FirstClassMailType = FirstClassMailType + def get_Container(self): + return self.Container + def set_Container(self, Container): + self.Container = Container + def get_Size(self): + return self.Size + def set_Size(self, Size): + self.Size = Size + def get_Width(self): + return self.Width + def set_Width(self, Width): + self.Width = Width + def get_Length(self): + return self.Length + def set_Length(self, Length): + self.Length = Length + def get_Height(self): + return self.Height + def set_Height(self, Height): + self.Height = Height + def get_Girth(self): + return self.Girth + def set_Girth(self, Girth): + self.Girth = Girth + def get_Machinable(self): + return self.Machinable + def set_Machinable(self, Machinable): + self.Machinable = Machinable + def get_Zone(self): + return self.Zone + def set_Zone(self, Zone): + self.Zone = Zone + def get_Postage(self): + return self.Postage + def set_Postage(self, Postage): + self.Postage = Postage + def add_Postage(self, value): + self.Postage.append(value) + def insert_Postage_at(self, index, value): + self.Postage.insert(index, value) + def replace_Postage_at(self, index, value): + self.Postage[index] = value + def get_Restriction(self): + return self.Restriction + def set_Restriction(self, Restriction): + self.Restriction = Restriction + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.Error is not None or + 
self.ZipOrigination is not None or + self.ZipDestination is not None or + self.Pounds is not None or + self.Ounces is not None or + self.FirstClassMailType is not None or + self.Container is not None or + self.Size is not None or + self.Width is not None or + self.Length is not None or + self.Height is not None or + self.Girth is not None or + self.Machinable is not None or + self.Zone is not None or + self.Postage or + self.Restriction is not None or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageType', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Error is not None: + namespaceprefix_ = self.Error_nsprefix_ + ':' if (UseCapturedNS_ and self.Error_nsprefix_) else '' + self.Error.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Error', pretty_print=pretty_print) + if self.ZipOrigination is not None: + namespaceprefix_ = self.ZipOrigination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipOrigination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipOrigination>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipOrigination), input_name='ZipOrigination')), namespaceprefix_ , eol_)) + if self.ZipDestination is not None: + namespaceprefix_ = self.ZipDestination_nsprefix_ + ':' if (UseCapturedNS_ and self.ZipDestination_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZipDestination>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ZipDestination), input_name='ZipDestination')), namespaceprefix_ , eol_)) + if self.Pounds is not None: + namespaceprefix_ = self.Pounds_nsprefix_ + ':' if (UseCapturedNS_ and self.Pounds_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPounds>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Pounds, input_name='Pounds'), 
namespaceprefix_ , eol_)) + if self.Ounces is not None: + namespaceprefix_ = self.Ounces_nsprefix_ + ':' if (UseCapturedNS_ and self.Ounces_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOunces>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Ounces, input_name='Ounces'), namespaceprefix_ , eol_)) + if self.FirstClassMailType is not None: + namespaceprefix_ = self.FirstClassMailType_nsprefix_ + ':' if (UseCapturedNS_ and self.FirstClassMailType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirstClassMailType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirstClassMailType), input_name='FirstClassMailType')), namespaceprefix_ , eol_)) + if self.Container is not None: + namespaceprefix_ = self.Container_nsprefix_ + ':' if (UseCapturedNS_ and self.Container_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sContainer>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Container), input_name='Container')), namespaceprefix_ , eol_)) + if self.Size is not None: + namespaceprefix_ = self.Size_nsprefix_ + ':' if (UseCapturedNS_ and self.Size_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSize>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Size), input_name='Size')), namespaceprefix_ , eol_)) + if self.Width is not None: + namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sWidth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Width, input_name='Width'), namespaceprefix_ , eol_)) + if self.Length is not None: + namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sLength>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Length, input_name='Length'), namespaceprefix_ , eol_)) + if self.Height is not None: + namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHeight>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Height, input_name='Height'), namespaceprefix_ , eol_)) + if self.Girth is not None: + namespaceprefix_ = self.Girth_nsprefix_ + ':' if (UseCapturedNS_ and self.Girth_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGirth>%s%s' % (namespaceprefix_ , self.gds_format_decimal(self.Girth, input_name='Girth'), namespaceprefix_ , eol_)) + if self.Machinable is not None: + namespaceprefix_ = self.Machinable_nsprefix_ + ':' if (UseCapturedNS_ and self.Machinable_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMachinable>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Machinable), input_name='Machinable')), namespaceprefix_ , eol_)) + if self.Zone is not None: + namespaceprefix_ = self.Zone_nsprefix_ + ':' if (UseCapturedNS_ and self.Zone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZone>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zone, input_name='Zone'), namespaceprefix_ , eol_)) + for Postage_ in self.Postage: + namespaceprefix_ = self.Postage_nsprefix_ + ':' if (UseCapturedNS_ and self.Postage_nsprefix_) else '' + Postage_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='Postage', pretty_print=pretty_print) + if self.Restriction is not None: + namespaceprefix_ = self.Restriction_nsprefix_ + ':' if (UseCapturedNS_ and self.Restriction_nsprefix_) else '' + self.Restriction.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Restriction', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Error': + obj_ = ErrorType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Error', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Error'): + self.add_Error(obj_.value) + elif hasattr(self, 'set_Error'): + self.set_Error(obj_.value) + elif nodeName_ == 'ZipOrigination' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'ZipOrigination') + valuestr_ = self.gds_validate_string(valuestr_, node, 'ZipOrigination') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'ZipOrigination', valuestr_) + self.content_.append(obj_) + self.ZipOrigination_nsprefix_ = child_.prefix + elif nodeName_ == 'ZipDestination' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'ZipDestination') + valuestr_ = self.gds_validate_string(valuestr_, node, 'ZipDestination') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'ZipDestination', valuestr_) + self.content_.append(obj_) + self.ZipDestination_nsprefix_ = child_.prefix + elif nodeName_ == 'Pounds' and child_.text is not None: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Pounds') + ival_ = self.gds_validate_integer(ival_, node, 'Pounds') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeInteger, 'Pounds', ival_) + self.content_.append(obj_) + self.Pounds_nsprefix_ = child_.prefix + elif nodeName_ == 'Ounces' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Ounces') + fval_ = self.gds_validate_decimal(fval_, node, 'Ounces') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Ounces', fval_) + self.content_.append(obj_) + self.Ounces_nsprefix_ = child_.prefix + elif nodeName_ == 'FirstClassMailType' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'FirstClassMailType') + valuestr_ = self.gds_validate_string(valuestr_, node, 'FirstClassMailType') + obj_ = 
self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'FirstClassMailType', valuestr_) + self.content_.append(obj_) + self.FirstClassMailType_nsprefix_ = child_.prefix + elif nodeName_ == 'Container' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Container') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Container') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Container', valuestr_) + self.content_.append(obj_) + self.Container_nsprefix_ = child_.prefix + elif nodeName_ == 'Size' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Size') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Size') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Size', valuestr_) + self.content_.append(obj_) + self.Size_nsprefix_ = child_.prefix + elif nodeName_ == 'Width' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Width') + fval_ = self.gds_validate_decimal(fval_, node, 'Width') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Width', fval_) + self.content_.append(obj_) + self.Width_nsprefix_ = child_.prefix + elif nodeName_ == 'Length' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Length') + fval_ = self.gds_validate_decimal(fval_, node, 'Length') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Length', fval_) + self.content_.append(obj_) + self.Length_nsprefix_ = child_.prefix + elif nodeName_ == 'Height' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Height') + fval_ = self.gds_validate_decimal(fval_, node, 'Height') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Height', fval_) + self.content_.append(obj_) + self.Height_nsprefix_ = child_.prefix + elif nodeName_ == 'Girth' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_decimal(sval_, node, 'Girth') + fval_ = self.gds_validate_decimal(fval_, node, 'Girth') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Girth', fval_) + self.content_.append(obj_) + self.Girth_nsprefix_ = child_.prefix + elif nodeName_ == 'Machinable' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Machinable') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Machinable') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Machinable', valuestr_) + self.content_.append(obj_) + self.Machinable_nsprefix_ = child_.prefix + elif nodeName_ == 'Zone' and child_.text is not None: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zone') + ival_ = self.gds_validate_integer(ival_, node, 'Zone') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeInteger, 'Zone', ival_) + self.content_.append(obj_) + self.Zone_nsprefix_ = child_.prefix + elif nodeName_ == 'Postage': + obj_ = PostageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Postage', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Postage'): + self.add_Postage(obj_.value) + elif hasattr(self, 
'set_Postage'): + self.set_Postage(obj_.value) + elif nodeName_ == 'Restriction': + obj_ = RestrictionType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'Restriction', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_Restriction'): + self.add_Restriction(obj_.value) + elif hasattr(self, 'set_Restriction'): + self.set_Restriction(obj_.value) + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class PackageType + + +class ErrorType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Number=None, Source=None, Description=None, HelpFile=None, HelpContext=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Number = Number + self.Number_nsprefix_ = None + self.Source = Source + self.Source_nsprefix_ = None + self.Description = Description + self.Description_nsprefix_ = None + self.HelpFile = HelpFile + self.HelpFile_nsprefix_ = None + self.HelpContext = HelpContext + self.HelpContext_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ErrorType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ErrorType.subclass: + return ErrorType.subclass(*args_, **kwargs_) + else: + return ErrorType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Number(self): + return self.Number + def set_Number(self, Number): + self.Number = Number + def get_Source(self): + return self.Source + def set_Source(self, Source): + self.Source = Source + def get_Description(self): + return self.Description + def set_Description(self, Description): + self.Description = Description + def get_HelpFile(self): + return self.HelpFile + def set_HelpFile(self, HelpFile): + self.HelpFile = HelpFile + def get_HelpContext(self): + return self.HelpContext + def set_HelpContext(self, HelpContext): + self.HelpContext = HelpContext + def has__content(self): + if ( + self.Number is not None or + self.Source is not None or + self.Description is not None or + self.HelpFile is not None or + self.HelpContext is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ErrorType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ErrorType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ErrorType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ErrorType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level 
+ 1, namespaceprefix_, namespacedef_, name_='ErrorType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ErrorType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ErrorType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Number is not None: + namespaceprefix_ = self.Number_nsprefix_ + ':' if (UseCapturedNS_ and self.Number_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumber>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Number, input_name='Number'), namespaceprefix_ , eol_)) + if self.Source is not None: + namespaceprefix_ = self.Source_nsprefix_ + ':' if (UseCapturedNS_ and self.Source_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSource>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Source), input_name='Source')), namespaceprefix_ , eol_)) + if self.Description is not None: + namespaceprefix_ = self.Description_nsprefix_ + ':' if (UseCapturedNS_ and self.Description_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescription>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_ , eol_)) + if self.HelpFile is not None: + namespaceprefix_ = self.HelpFile_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpFile_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpFile>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HelpFile), input_name='HelpFile')), namespaceprefix_ , eol_)) + if self.HelpContext is not None: + namespaceprefix_ = self.HelpContext_nsprefix_ + ':' if (UseCapturedNS_ and self.HelpContext_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sHelpContext>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.HelpContext, input_name='HelpContext'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Number' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Number') + ival_ = self.gds_validate_integer(ival_, node, 'Number') + self.Number = ival_ + self.Number_nsprefix_ = child_.prefix + elif nodeName_ == 'Source': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Source') + value_ = self.gds_validate_string(value_, node, 'Source') + self.Source = value_ + self.Source_nsprefix_ = child_.prefix + elif nodeName_ == 'Description': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Description') + value_ = self.gds_validate_string(value_, node, 'Description') + self.Description = 
value_ + self.Description_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpFile': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'HelpFile') + value_ = self.gds_validate_string(value_, node, 'HelpFile') + self.HelpFile = value_ + self.HelpFile_nsprefix_ = child_.prefix + elif nodeName_ == 'HelpContext' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'HelpContext') + ival_ = self.gds_validate_integer(ival_, node, 'HelpContext') + self.HelpContext = ival_ + self.HelpContext_nsprefix_ = child_.prefix +# end class ErrorType + + +class PostageType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, CLASSID=None, MailService=None, Rate=None, CommercialRate=None, CommercialPlusRate=None, CommitmentDate=None, CommitmentName=None, MaxDimensions=None, ServiceInformation=None, SpecialServices=None, Zone=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.CLASSID = _cast(int, CLASSID) + self.CLASSID_nsprefix_ = None + self.MailService = MailService + self.MailService_nsprefix_ = None + self.Rate = Rate + self.Rate_nsprefix_ = None + self.CommercialRate = CommercialRate + self.CommercialRate_nsprefix_ = None + self.CommercialPlusRate = CommercialPlusRate + self.CommercialPlusRate_nsprefix_ = None + self.CommitmentDate = CommitmentDate + self.CommitmentDate_nsprefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.MaxDimensions = MaxDimensions + self.MaxDimensions_nsprefix_ = None + self.ServiceInformation = ServiceInformation + self.ServiceInformation_nsprefix_ = None + self.SpecialServices = SpecialServices + self.SpecialServices_nsprefix_ = None + self.Zone = Zone + self.Zone_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PostageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PostageType.subclass: + return PostageType.subclass(*args_, **kwargs_) + else: + return PostageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailService(self): + return self.MailService + def set_MailService(self, MailService): + self.MailService = MailService + def get_Rate(self): + return self.Rate + def set_Rate(self, Rate): + self.Rate = Rate + def get_CommercialRate(self): + return self.CommercialRate + def set_CommercialRate(self, CommercialRate): + self.CommercialRate = CommercialRate + def get_CommercialPlusRate(self): + return self.CommercialPlusRate + def set_CommercialPlusRate(self, CommercialPlusRate): + self.CommercialPlusRate = CommercialPlusRate + def get_CommitmentDate(self): + return self.CommitmentDate + def set_CommitmentDate(self, CommitmentDate): + self.CommitmentDate = CommitmentDate + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName 
= CommitmentName + def get_MaxDimensions(self): + return self.MaxDimensions + def set_MaxDimensions(self, MaxDimensions): + self.MaxDimensions = MaxDimensions + def get_ServiceInformation(self): + return self.ServiceInformation + def set_ServiceInformation(self, ServiceInformation): + self.ServiceInformation = ServiceInformation + def get_SpecialServices(self): + return self.SpecialServices + def set_SpecialServices(self, SpecialServices): + self.SpecialServices = SpecialServices + def get_Zone(self): + return self.Zone + def set_Zone(self, Zone): + self.Zone = Zone + def get_CLASSID(self): + return self.CLASSID + def set_CLASSID(self, CLASSID): + self.CLASSID = CLASSID + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.MailService is not None or + self.Rate is not None or + self.CommercialRate is not None or + self.CommercialPlusRate is not None or + self.CommitmentDate is not None or + self.CommitmentName is not None or + self.MaxDimensions is not None or + self.ServiceInformation is not None or + self.SpecialServices is not None or + self.Zone is not None or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PostageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PostageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PostageType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PostageType'): + if self.CLASSID is not None and 'CLASSID' not in already_processed: + already_processed.add('CLASSID') + outfile.write(' CLASSID="%s"' % self.gds_format_integer(self.CLASSID, input_name='CLASSID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PostageType', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailService is not None: + namespaceprefix_ = self.MailService_nsprefix_ + ':' if (UseCapturedNS_ and self.MailService_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailService), input_name='MailService')), namespaceprefix_ , eol_)) + if self.Rate is not None: + namespaceprefix_ = self.Rate_nsprefix_ + ':' if (UseCapturedNS_ and self.Rate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRate>%s%s' % (namespaceprefix_ , 
self.gds_format_float(self.Rate, input_name='Rate'), namespaceprefix_ , eol_)) + if self.CommercialRate is not None: + namespaceprefix_ = self.CommercialRate_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialRate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialRate>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialRate, input_name='CommercialRate'), namespaceprefix_ , eol_)) + if self.CommercialPlusRate is not None: + namespaceprefix_ = self.CommercialPlusRate_nsprefix_ + ':' if (UseCapturedNS_ and self.CommercialPlusRate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommercialPlusRate>%s%s' % (namespaceprefix_ , self.gds_format_float(self.CommercialPlusRate, input_name='CommercialPlusRate'), namespaceprefix_ , eol_)) + if self.CommitmentDate is not None: + namespaceprefix_ = self.CommitmentDate_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentDate), input_name='CommitmentDate')), namespaceprefix_ , eol_)) + if self.CommitmentName is not None: + namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_)) + if self.MaxDimensions is not None: + namespaceprefix_ = self.MaxDimensions_nsprefix_ + ':' if (UseCapturedNS_ and self.MaxDimensions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMaxDimensions>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MaxDimensions), input_name='MaxDimensions')), namespaceprefix_ , eol_)) + if self.ServiceInformation is not None: + namespaceprefix_ = self.ServiceInformation_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceInformation_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceInformation>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceInformation), input_name='ServiceInformation')), namespaceprefix_ , eol_)) + if self.SpecialServices is not None: + namespaceprefix_ = self.SpecialServices_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialServices_nsprefix_) else '' + self.SpecialServices.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpecialServices', pretty_print=pretty_print) + if self.Zone is not None: + namespaceprefix_ = self.Zone_nsprefix_ + ':' if (UseCapturedNS_ and self.Zone_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZone>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zone), input_name='Zone')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('CLASSID', node) + if value is not None and 'CLASSID' not in already_processed: + already_processed.add('CLASSID') + self.CLASSID = self.gds_parse_integer(value, node, 'CLASSID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailService' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'MailService') + valuestr_ = self.gds_validate_string(valuestr_, node, 'MailService') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'MailService', valuestr_) + self.content_.append(obj_) + self.MailService_nsprefix_ = child_.prefix + elif nodeName_ == 'Rate' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Rate') + fval_ = self.gds_validate_float(fval_, node, 'Rate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'Rate', fval_) + self.content_.append(obj_) + self.Rate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialRate' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialRate') + fval_ = self.gds_validate_float(fval_, node, 'CommercialRate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'CommercialRate', fval_) + self.content_.append(obj_) + self.CommercialRate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommercialPlusRate' and child_.text is not None: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'CommercialPlusRate') + fval_ = self.gds_validate_float(fval_, node, 'CommercialPlusRate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeFloat, 'CommercialPlusRate', fval_) + self.content_.append(obj_) + self.CommercialPlusRate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentDate' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'CommitmentDate') + valuestr_ = self.gds_validate_string(valuestr_, node, 'CommitmentDate') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'CommitmentDate', valuestr_) + self.content_.append(obj_) + self.CommitmentDate_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentName' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'CommitmentName') + valuestr_ = self.gds_validate_string(valuestr_, node, 'CommitmentName') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'CommitmentName', valuestr_) + self.content_.append(obj_) + self.CommitmentName_nsprefix_ = child_.prefix + elif nodeName_ == 'MaxDimensions' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'MaxDimensions') + valuestr_ = self.gds_validate_string(valuestr_, node, 'MaxDimensions') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'MaxDimensions', valuestr_) + self.content_.append(obj_) + self.MaxDimensions_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceInformation' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'ServiceInformation') + valuestr_ = 
self.gds_validate_string(valuestr_, node, 'ServiceInformation') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'ServiceInformation', valuestr_) + self.content_.append(obj_) + self.ServiceInformation_nsprefix_ = child_.prefix + elif nodeName_ == 'SpecialServices': + obj_ = SpecialServicesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'SpecialServices', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_SpecialServices'): + self.add_SpecialServices(obj_.value) + elif hasattr(self, 'set_SpecialServices'): + self.set_SpecialServices(obj_.value) + elif nodeName_ == 'Zone' and child_.text is not None: + valuestr_ = child_.text + valuestr_ = self.gds_parse_string(valuestr_, node, 'Zone') + valuestr_ = self.gds_validate_string(valuestr_, node, 'Zone') + obj_ = self.mixedclass_(MixedContainer.CategorySimple, + MixedContainer.TypeString, 'Zone', valuestr_) + self.content_.append(obj_) + self.Zone_nsprefix_ = child_.prefix + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class PostageType + + +class SpecialServicesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SpecialService=None, valueOf_=None, mixedclass_=None, content_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if SpecialService is None: + self.SpecialService = [] + else: + self.SpecialService = SpecialService + self.SpecialService_nsprefix_ = None + self.valueOf_ = valueOf_ + if mixedclass_ is None: + self.mixedclass_ = MixedContainer + else: + self.mixedclass_ = mixedclass_ + if content_ is None: + self.content_ = [] + else: + self.content_ = content_ + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecialServicesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SpecialServicesType.subclass: + return SpecialServicesType.subclass(*args_, **kwargs_) + else: + return SpecialServicesType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SpecialService(self): + return self.SpecialService + def set_SpecialService(self, SpecialService): + self.SpecialService = SpecialService + def add_SpecialService(self, value): + self.SpecialService.append(value) + def insert_SpecialService_at(self, index, value): + self.SpecialService.insert(index, value) + def replace_SpecialService_at(self, index, value): + self.SpecialService[index] = value + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + self.SpecialService or + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or + self.content_ + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecialServicesType') + if 
imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SpecialServicesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecialServicesType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecialServicesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServicesType', fromsubclass_=False, pretty_print=True): + if not fromsubclass_: + for item_ in self.content_: + item_.export(outfile, level, item_.name, namespaceprefix_, pretty_print=pretty_print) + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for SpecialService_ in self.SpecialService: + namespaceprefix_ = self.SpecialService_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialService_nsprefix_) else '' + SpecialService_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpecialService', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + if node.text is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', node.text) + self.content_.append(obj_) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SpecialService': + obj_ = SpecialServiceType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + obj_ = self.mixedclass_(MixedContainer.CategoryComplex, + MixedContainer.TypeNone, 'SpecialService', obj_) + self.content_.append(obj_) + if hasattr(self, 'add_SpecialService'): + self.add_SpecialService(obj_.value) + elif hasattr(self, 'set_SpecialService'): + self.set_SpecialService(obj_.value) + if not fromsubclass_ and child_.tail is not None: + obj_ = self.mixedclass_(MixedContainer.CategoryText, + MixedContainer.TypeNone, '', child_.tail) + self.content_.append(obj_) +# end class SpecialServicesType + + +class SpecialServiceType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ServiceID=None, ServiceName=None, Available=None, AvailableOnline=None, AvailableCPP=None, Price=None, PriceOnline=None, PriceCPP=None, DeclaredValueRequired=None, DueSenderRequired=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = 
kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ServiceID = ServiceID + self.ServiceID_nsprefix_ = None + self.ServiceName = ServiceName + self.ServiceName_nsprefix_ = None + self.Available = Available + self.Available_nsprefix_ = None + self.AvailableOnline = AvailableOnline + self.AvailableOnline_nsprefix_ = None + self.AvailableCPP = AvailableCPP + self.AvailableCPP_nsprefix_ = None + self.Price = Price + self.Price_nsprefix_ = None + self.PriceOnline = PriceOnline + self.PriceOnline_nsprefix_ = None + self.PriceCPP = PriceCPP + self.PriceCPP_nsprefix_ = None + self.DeclaredValueRequired = DeclaredValueRequired + self.DeclaredValueRequired_nsprefix_ = None + self.DueSenderRequired = DueSenderRequired + self.DueSenderRequired_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecialServiceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SpecialServiceType.subclass: + return SpecialServiceType.subclass(*args_, **kwargs_) + else: + return SpecialServiceType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ServiceID(self): + return self.ServiceID + def set_ServiceID(self, ServiceID): + self.ServiceID = ServiceID + def get_ServiceName(self): + return self.ServiceName + def set_ServiceName(self, ServiceName): + self.ServiceName = ServiceName + def get_Available(self): + return self.Available + def set_Available(self, Available): + self.Available = Available + def get_AvailableOnline(self): + return self.AvailableOnline + def set_AvailableOnline(self, AvailableOnline): + self.AvailableOnline = AvailableOnline + def get_AvailableCPP(self): + return self.AvailableCPP + def set_AvailableCPP(self, AvailableCPP): + self.AvailableCPP = AvailableCPP + def get_Price(self): + return self.Price + def set_Price(self, Price): + self.Price = Price + def get_PriceOnline(self): + return self.PriceOnline + def set_PriceOnline(self, PriceOnline): + self.PriceOnline = PriceOnline + def get_PriceCPP(self): + return self.PriceCPP + def set_PriceCPP(self, PriceCPP): + self.PriceCPP = PriceCPP + def get_DeclaredValueRequired(self): + return self.DeclaredValueRequired + def set_DeclaredValueRequired(self, DeclaredValueRequired): + self.DeclaredValueRequired = DeclaredValueRequired + def get_DueSenderRequired(self): + return self.DueSenderRequired + def set_DueSenderRequired(self, DueSenderRequired): + self.DueSenderRequired = DueSenderRequired + def has__content(self): + if ( + self.ServiceID is not None or + self.ServiceName is not None or + self.Available is not None or + self.AvailableOnline is not None or + self.AvailableCPP is not None or + self.Price is not None or + self.PriceOnline is not None or + self.PriceCPP is not None or + self.DeclaredValueRequired is not None or + self.DueSenderRequired is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServiceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecialServiceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SpecialServiceType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = 
self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecialServiceType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecialServiceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecialServiceType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecialServiceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ServiceID is not None: + namespaceprefix_ = self.ServiceID_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceID>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ServiceID, input_name='ServiceID'), namespaceprefix_ , eol_)) + if self.ServiceName is not None: + namespaceprefix_ = self.ServiceName_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceName), input_name='ServiceName')), namespaceprefix_ , eol_)) + if self.Available is not None: + namespaceprefix_ = self.Available_nsprefix_ + ':' if (UseCapturedNS_ and self.Available_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailable>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Available, input_name='Available'), namespaceprefix_ , eol_)) + if self.AvailableOnline is not None: + namespaceprefix_ = self.AvailableOnline_nsprefix_ + ':' if (UseCapturedNS_ and self.AvailableOnline_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailableOnline>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AvailableOnline, input_name='AvailableOnline'), namespaceprefix_ , eol_)) + if self.AvailableCPP is not None: + namespaceprefix_ = self.AvailableCPP_nsprefix_ + ':' if (UseCapturedNS_ and self.AvailableCPP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAvailableCPP>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AvailableCPP, input_name='AvailableCPP'), namespaceprefix_ , eol_)) + if self.Price is not None: + namespaceprefix_ = self.Price_nsprefix_ + ':' if (UseCapturedNS_ and self.Price_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPrice>%s%s' % (namespaceprefix_ , self.gds_format_float(self.Price, input_name='Price'), namespaceprefix_ , eol_)) + if self.PriceOnline is not None: + namespaceprefix_ = self.PriceOnline_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceOnline_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPriceOnline>%s%s' % (namespaceprefix_ , self.gds_format_float(self.PriceOnline, input_name='PriceOnline'), namespaceprefix_ , eol_)) + if self.PriceCPP is not None: + namespaceprefix_ = self.PriceCPP_nsprefix_ + ':' if (UseCapturedNS_ and self.PriceCPP_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sPriceCPP>%s%s' % (namespaceprefix_ , self.gds_format_float(self.PriceCPP, input_name='PriceCPP'), namespaceprefix_ , eol_)) + if self.DeclaredValueRequired is not None: + namespaceprefix_ = self.DeclaredValueRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.DeclaredValueRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeclaredValueRequired>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DeclaredValueRequired, input_name='DeclaredValueRequired'), namespaceprefix_ , eol_)) + if self.DueSenderRequired is not None: + namespaceprefix_ = self.DueSenderRequired_nsprefix_ + ':' if (UseCapturedNS_ and self.DueSenderRequired_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDueSenderRequired>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DueSenderRequired, input_name='DueSenderRequired'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'ServiceID' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ServiceID') + ival_ = self.gds_validate_integer(ival_, node, 'ServiceID') + self.ServiceID = ival_ + self.ServiceID_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceName') + value_ = self.gds_validate_string(value_, node, 'ServiceName') + self.ServiceName = value_ + self.ServiceName_nsprefix_ = child_.prefix + elif nodeName_ == 'Available': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'Available') + ival_ = self.gds_validate_boolean(ival_, node, 'Available') + self.Available = ival_ + self.Available_nsprefix_ = child_.prefix + elif nodeName_ == 'AvailableOnline': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AvailableOnline') + ival_ = self.gds_validate_boolean(ival_, node, 'AvailableOnline') + self.AvailableOnline = ival_ + self.AvailableOnline_nsprefix_ = child_.prefix + elif nodeName_ == 'AvailableCPP': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AvailableCPP') + ival_ = self.gds_validate_boolean(ival_, node, 'AvailableCPP') + self.AvailableCPP = ival_ + self.AvailableCPP_nsprefix_ = child_.prefix + elif nodeName_ == 'Price' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'Price') + fval_ = self.gds_validate_float(fval_, node, 'Price') + self.Price = fval_ + self.Price_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceOnline' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'PriceOnline') + fval_ = self.gds_validate_float(fval_, node, 'PriceOnline') + self.PriceOnline = fval_ + self.PriceOnline_nsprefix_ = child_.prefix + elif nodeName_ == 'PriceCPP' and child_.text: + sval_ = child_.text + fval_ = self.gds_parse_float(sval_, node, 'PriceCPP') + fval_ = self.gds_validate_float(fval_, node, 'PriceCPP') + self.PriceCPP = fval_ + 
self.PriceCPP_nsprefix_ = child_.prefix + elif nodeName_ == 'DeclaredValueRequired': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DeclaredValueRequired') + ival_ = self.gds_validate_boolean(ival_, node, 'DeclaredValueRequired') + self.DeclaredValueRequired = ival_ + self.DeclaredValueRequired_nsprefix_ = child_.prefix + elif nodeName_ == 'DueSenderRequired': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DueSenderRequired') + ival_ = self.gds_validate_boolean(ival_, node, 'DueSenderRequired') + self.DueSenderRequired = ival_ + self.DueSenderRequired_nsprefix_ = child_.prefix +# end class SpecialServiceType + + +class RestrictionType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Restrictions=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Restrictions = Restrictions + self.Restrictions_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RestrictionType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RestrictionType.subclass: + return RestrictionType.subclass(*args_, **kwargs_) + else: + return RestrictionType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Restrictions(self): + return self.Restrictions + def set_Restrictions(self, Restrictions): + self.Restrictions = Restrictions + def has__content(self): + if ( + self.Restrictions is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RestrictionType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'RestrictionType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RestrictionType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RestrictionType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RestrictionType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Restrictions is not None: + namespaceprefix_ = self.Restrictions_nsprefix_ + ':' if (UseCapturedNS_ and self.Restrictions_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestrictions>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Restrictions), input_name='Restrictions')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Restrictions': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Restrictions') + value_ = self.gds_validate_string(value_, node, 'Restrictions') + self.Restrictions = value_ + self.Restrictions_nsprefix_ = child_.prefix +# end class RestrictionType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + 
reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RateV4Response' + rootClass = RateV4Response + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from rate_v4_response import *\n\n') + sys.stdout.write('import rate_v4_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "ErrorType", + "PackageType", + "PostageType", + "RateV4Response", + "RestrictionType", + "SpecialServiceType", + "SpecialServicesType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/scan_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/scan_request.py new file mode 100644 index 0000000000..5616177575 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/scan_request.py @@ -0,0 +1,1855 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:08 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/scan_request.py') +# +# Command line arguments: +# ./schemas/SCANRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/scan_request.py" ./schemas/SCANRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+            optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SCANRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Option=None, FromName=None, FromFirm=None, FromAddress1=None, FromAddress2=None, FromCity=None, FromState=None, FromZip5=None, FromZip4=None, Shipment=None, MailDate=None, MailTime=None, EntryFacility=None, ImageType=None, CustomerRefNo=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Option = Option + self.Option_nsprefix_ = None + self.FromName = FromName + self.FromName_nsprefix_ = None + self.FromFirm = FromFirm + self.FromFirm_nsprefix_ = None + self.FromAddress1 = FromAddress1 + self.FromAddress1_nsprefix_ = None + self.FromAddress2 = FromAddress2 + self.FromAddress2_nsprefix_ = None + self.FromCity = FromCity + self.FromCity_nsprefix_ = None + self.FromState = FromState + self.FromState_nsprefix_ = None + self.FromZip5 = FromZip5 + self.FromZip5_nsprefix_ = None + self.FromZip4 = FromZip4 + self.FromZip4_nsprefix_ = None + self.Shipment = Shipment + self.Shipment_nsprefix_ = None + self.MailDate = MailDate + self.MailDate_nsprefix_ = None + self.MailTime = MailTime + self.MailTime_nsprefix_ = None + self.EntryFacility = EntryFacility + self.EntryFacility_nsprefix_ = None + self.ImageType = ImageType + self.ImageType_nsprefix_ = None + self.CustomerRefNo = CustomerRefNo + self.CustomerRefNo_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANRequest.subclass: + return SCANRequest.subclass(*args_, **kwargs_) + else: + return SCANRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Option(self): + return self.Option + def set_Option(self, Option): + self.Option = Option + def get_FromName(self): + return self.FromName + def set_FromName(self, FromName): + self.FromName = FromName + def get_FromFirm(self): + return self.FromFirm + def set_FromFirm(self, FromFirm): + self.FromFirm = FromFirm + def get_FromAddress1(self): + return self.FromAddress1 + def set_FromAddress1(self, FromAddress1): 
+ self.FromAddress1 = FromAddress1 + def get_FromAddress2(self): + return self.FromAddress2 + def set_FromAddress2(self, FromAddress2): + self.FromAddress2 = FromAddress2 + def get_FromCity(self): + return self.FromCity + def set_FromCity(self, FromCity): + self.FromCity = FromCity + def get_FromState(self): + return self.FromState + def set_FromState(self, FromState): + self.FromState = FromState + def get_FromZip5(self): + return self.FromZip5 + def set_FromZip5(self, FromZip5): + self.FromZip5 = FromZip5 + def get_FromZip4(self): + return self.FromZip4 + def set_FromZip4(self, FromZip4): + self.FromZip4 = FromZip4 + def get_Shipment(self): + return self.Shipment + def set_Shipment(self, Shipment): + self.Shipment = Shipment + def get_MailDate(self): + return self.MailDate + def set_MailDate(self, MailDate): + self.MailDate = MailDate + def get_MailTime(self): + return self.MailTime + def set_MailTime(self, MailTime): + self.MailTime = MailTime + def get_EntryFacility(self): + return self.EntryFacility + def set_EntryFacility(self, EntryFacility): + self.EntryFacility = EntryFacility + def get_ImageType(self): + return self.ImageType + def set_ImageType(self, ImageType): + self.ImageType = ImageType + def get_CustomerRefNo(self): + return self.CustomerRefNo + def set_CustomerRefNo(self, CustomerRefNo): + self.CustomerRefNo = CustomerRefNo + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Option is not None or + self.FromName is not None or + self.FromFirm is not None or + self.FromAddress1 is not None or + self.FromAddress2 is not None or + self.FromCity is not None or + self.FromState is not None or + self.FromZip5 is not None or + self.FromZip4 is not None or + self.Shipment is not None or + self.MailDate is not None or + self.MailTime is not None or + self.EntryFacility is not None or + self.ImageType is not None or + self.CustomerRefNo is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SCANRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SCANRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if 
self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Option is not None: + namespaceprefix_ = self.Option_nsprefix_ + ':' if (UseCapturedNS_ and self.Option_nsprefix_) else '' + self.Option.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Option', pretty_print=pretty_print) + if self.FromName is not None: + namespaceprefix_ = self.FromName_nsprefix_ + ':' if (UseCapturedNS_ and self.FromName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromName), input_name='FromName')), namespaceprefix_ , eol_)) + if self.FromFirm is not None: + namespaceprefix_ = self.FromFirm_nsprefix_ + ':' if (UseCapturedNS_ and self.FromFirm_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromFirm>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromFirm), input_name='FromFirm')), namespaceprefix_ , eol_)) + if self.FromAddress1 is not None: + namespaceprefix_ = self.FromAddress1_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress1), input_name='FromAddress1')), namespaceprefix_ , eol_)) + if self.FromAddress2 is not None: + namespaceprefix_ = self.FromAddress2_nsprefix_ + ':' if (UseCapturedNS_ and self.FromAddress2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromAddress2), input_name='FromAddress2')), namespaceprefix_ , eol_)) + if self.FromCity is not None: + namespaceprefix_ = self.FromCity_nsprefix_ + ':' if (UseCapturedNS_ and self.FromCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromCity), input_name='FromCity')), namespaceprefix_ , eol_)) + if self.FromState is not None: + namespaceprefix_ = self.FromState_nsprefix_ + ':' if (UseCapturedNS_ and self.FromState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FromState), input_name='FromState')), namespaceprefix_ , eol_)) + if self.FromZip5 is not None: + namespaceprefix_ = self.FromZip5_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.FromZip5, input_name='FromZip5'), namespaceprefix_ , eol_)) + if self.FromZip4 is not None: + namespaceprefix_ = self.FromZip4_nsprefix_ + ':' if (UseCapturedNS_ and self.FromZip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFromZip4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.FromZip4, input_name='FromZip4'), namespaceprefix_ , eol_)) + if self.Shipment is not None: + 
namespaceprefix_ = self.Shipment_nsprefix_ + ':' if (UseCapturedNS_ and self.Shipment_nsprefix_) else '' + self.Shipment.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Shipment', pretty_print=pretty_print) + if self.MailDate is not None: + namespaceprefix_ = self.MailDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MailDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailDate>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailDate, input_name='MailDate'), namespaceprefix_ , eol_)) + if self.MailTime is not None: + namespaceprefix_ = self.MailTime_nsprefix_ + ':' if (UseCapturedNS_ and self.MailTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailTime, input_name='MailTime'), namespaceprefix_ , eol_)) + if self.EntryFacility is not None: + namespaceprefix_ = self.EntryFacility_nsprefix_ + ':' if (UseCapturedNS_ and self.EntryFacility_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEntryFacility>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.EntryFacility, input_name='EntryFacility'), namespaceprefix_ , eol_)) + if self.ImageType is not None: + namespaceprefix_ = self.ImageType_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sImageType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ImageType), input_name='ImageType')), namespaceprefix_ , eol_)) + if self.CustomerRefNo is not None: + namespaceprefix_ = self.CustomerRefNo_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerRefNo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCustomerRefNo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerRefNo), input_name='CustomerRefNo')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Option': + obj_ = OptionType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Option = obj_ + obj_.original_tagname_ = 'Option' + elif nodeName_ == 'FromName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromName') + value_ = self.gds_validate_string(value_, node, 'FromName') + self.FromName = value_ + self.FromName_nsprefix_ = child_.prefix + elif nodeName_ == 'FromFirm': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromFirm') + value_ = self.gds_validate_string(value_, node, 'FromFirm') + self.FromFirm = value_ + 
self.FromFirm_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress1') + value_ = self.gds_validate_string(value_, node, 'FromAddress1') + self.FromAddress1 = value_ + self.FromAddress1_nsprefix_ = child_.prefix + elif nodeName_ == 'FromAddress2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromAddress2') + value_ = self.gds_validate_string(value_, node, 'FromAddress2') + self.FromAddress2 = value_ + self.FromAddress2_nsprefix_ = child_.prefix + elif nodeName_ == 'FromCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromCity') + value_ = self.gds_validate_string(value_, node, 'FromCity') + self.FromCity = value_ + self.FromCity_nsprefix_ = child_.prefix + elif nodeName_ == 'FromState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FromState') + value_ = self.gds_validate_string(value_, node, 'FromState') + self.FromState = value_ + self.FromState_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FromZip5') + ival_ = self.gds_validate_integer(ival_, node, 'FromZip5') + self.FromZip5 = ival_ + self.FromZip5_nsprefix_ = child_.prefix + elif nodeName_ == 'FromZip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'FromZip4') + ival_ = self.gds_validate_integer(ival_, node, 'FromZip4') + self.FromZip4 = ival_ + self.FromZip4_nsprefix_ = child_.prefix + elif nodeName_ == 'Shipment': + obj_ = ShipmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Shipment = obj_ + obj_.original_tagname_ = 'Shipment' + elif nodeName_ == 'MailDate' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailDate') + ival_ = self.gds_validate_integer(ival_, node, 'MailDate') + self.MailDate = ival_ + self.MailDate_nsprefix_ = child_.prefix + elif nodeName_ == 'MailTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailTime') + ival_ = self.gds_validate_integer(ival_, node, 'MailTime') + self.MailTime = ival_ + self.MailTime_nsprefix_ = child_.prefix + elif nodeName_ == 'EntryFacility' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'EntryFacility') + ival_ = self.gds_validate_integer(ival_, node, 'EntryFacility') + self.EntryFacility = ival_ + self.EntryFacility_nsprefix_ = child_.prefix + elif nodeName_ == 'ImageType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ImageType') + value_ = self.gds_validate_string(value_, node, 'ImageType') + self.ImageType = value_ + self.ImageType_nsprefix_ = child_.prefix + elif nodeName_ == 'CustomerRefNo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CustomerRefNo') + value_ = self.gds_validate_string(value_, node, 'CustomerRefNo') + self.CustomerRefNo = value_ + self.CustomerRefNo_nsprefix_ = child_.prefix +# end class SCANRequest + + +class OptionType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Form=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Form = Form + self.Form_nsprefix_ = None + def factory(*args_, **kwargs_): + if 
CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, OptionType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if OptionType.subclass: + return OptionType.subclass(*args_, **kwargs_) + else: + return OptionType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Form(self): + return self.Form + def set_Form(self, Form): + self.Form = Form + def has__content(self): + if ( + self.Form is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OptionType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('OptionType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'OptionType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OptionType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OptionType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OptionType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OptionType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Form is not None: + namespaceprefix_ = self.Form_nsprefix_ + ':' if (UseCapturedNS_ and self.Form_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sForm>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Form, input_name='Form'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Form' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Form') + ival_ = self.gds_validate_integer(ival_, node, 'Form') + self.Form = ival_ + self.Form_nsprefix_ = child_.prefix +# end class OptionType + + +class ShipmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PackageDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + 
self.ns_prefix_ = None + self.PackageDetail = PackageDetail + self.PackageDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ShipmentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ShipmentType.subclass: + return ShipmentType.subclass(*args_, **kwargs_) + else: + return ShipmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PackageDetail(self): + return self.PackageDetail + def set_PackageDetail(self, PackageDetail): + self.PackageDetail = PackageDetail + def has__content(self): + if ( + self.PackageDetail is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShipmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ShipmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShipmentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShipmentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShipmentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShipmentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PackageDetail is not None: + namespaceprefix_ = self.PackageDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageDetail_nsprefix_) else '' + self.PackageDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PackageDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PackageDetail': + obj_ = PackageDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.PackageDetail = obj_ + obj_.original_tagname_ = 'PackageDetail' +# end class ShipmentType + + +class PackageDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, PkgBarcode=None, 
gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.PkgBarcode = PkgBarcode + self.PkgBarcode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PackageDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PackageDetailType.subclass: + return PackageDetailType.subclass(*args_, **kwargs_) + else: + return PackageDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_PkgBarcode(self): + return self.PkgBarcode + def set_PkgBarcode(self, PkgBarcode): + self.PkgBarcode = PkgBarcode + def has__content(self): + if ( + self.PkgBarcode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PackageDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'PackageDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PackageDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PackageDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PackageDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PackageDetailType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.PkgBarcode is not None: + namespaceprefix_ = self.PkgBarcode_nsprefix_ + ':' if (UseCapturedNS_ and self.PkgBarcode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPkgBarcode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PkgBarcode), input_name='PkgBarcode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'PkgBarcode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'PkgBarcode') + value_ = self.gds_validate_string(value_, node, 'PkgBarcode') + self.PkgBarcode = value_ + self.PkgBarcode_nsprefix_ = child_.prefix +# end class PackageDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANRequest' + rootClass = SCANRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANRequest' + rootClass = SCANRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        content = etree_.tostring(
+            rootElement, pretty_print=True,
+            xml_declaration=True, encoding="utf-8")
+        sys.stdout.write(str(content))
+        sys.stdout.write('\n')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj, rootElement, mapping, reverse_node_mapping
+
+
+def parseString(inString, silence=False, print_warnings=True):
+    '''Parse a string, create the object tree, and export it.
+
+    Arguments:
+    - inString -- A string. This XML fragment should not start
+      with an XML declaration containing an encoding.
+    - silence -- A boolean. If False, export the object.
+    Returns -- The root object in the tree.
+    '''
+    parser = None
+    rootNode= parsexmlstring_(inString, parser)
+    gds_collector = GdsCollector_()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SCANRequest'
+        rootClass = SCANRequest
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    if not SaveElementTreeNode:
+        rootNode = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(
+            sys.stdout, 0, name_=rootTag,
+            namespacedef_='')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def parseLiteral(inFileName, silence=False, print_warnings=True):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    gds_collector = GdsCollector_()
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SCANRequest'
+        rootClass = SCANRequest
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    # Enable Python to collect the space used by the DOM.
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        sys.stdout.write('#from scan_request import *\n\n')
+        sys.stdout.write('import scan_request as model_\n\n')
+        sys.stdout.write('rootObj = model_.rootClass(\n')
+        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
+        sys.stdout.write(')\n')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def main():
+    args = sys.argv[1:]
+    if len(args) == 1:
+        parse(args[0])
+    else:
+        usage()
+
+
+if __name__ == '__main__':
+    #import pdb; pdb.set_trace()
+    main()
+
+RenameMappings_ = {
+}
+
+#
+# Mapping of namespaces to types defined in them
+# and the file in which each is defined.
+# simpleTypes are marked "ST" and complexTypes "CT".
+NamespaceToDefMappings_ = {} + +__all__ = [ + "OptionType", + "PackageDetailType", + "SCANRequest", + "ShipmentType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/scan_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/scan_response.py new file mode 100644 index 0000000000..40d659765a --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/scan_response.py @@ -0,0 +1,1536 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:08 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/scan_response.py') +# +# Command line arguments: +# ./schemas/SCANResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/scan_response.py" ./schemas/SCANResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SCANResponse(GeneratedsSuper): + """SCANFormImage -- 65255 skipped + + """ + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SCANFormNumber=None, SCANFormImage=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SCANFormNumber = SCANFormNumber + self.SCANFormNumber_nsprefix_ = None + self.SCANFormImage = SCANFormImage + self.SCANFormImage_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANResponse.subclass: + return SCANResponse.subclass(*args_, **kwargs_) + else: + return SCANResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SCANFormNumber(self): + return self.SCANFormNumber + def set_SCANFormNumber(self, SCANFormNumber): + self.SCANFormNumber = SCANFormNumber + def get_SCANFormImage(self): + return self.SCANFormImage + def set_SCANFormImage(self, SCANFormImage): + self.SCANFormImage = SCANFormImage + def has__content(self): + if ( + self.SCANFormNumber is not None or + self.SCANFormImage is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SCANResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SCANResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def 
_exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SCANFormNumber is not None: + namespaceprefix_ = self.SCANFormNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.SCANFormNumber_nsprefix_) else '' + self.SCANFormNumber.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SCANFormNumber', pretty_print=pretty_print) + if self.SCANFormImage is not None: + namespaceprefix_ = self.SCANFormImage_nsprefix_ + ':' if (UseCapturedNS_ and self.SCANFormImage_nsprefix_) else '' + self.SCANFormImage.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SCANFormImage', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SCANFormNumber': + obj_ = SCANFormNumberType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SCANFormNumber = obj_ + obj_.original_tagname_ = 'SCANFormNumber' + elif nodeName_ == 'SCANFormImage': + obj_ = SCANFormImageType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SCANFormImage = obj_ + obj_.original_tagname_ = 'SCANFormImage' +# end class SCANResponse + + +class SCANFormNumberType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ShipDate=None, EntryZipCode=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ShipDate = _cast(None, ShipDate) + self.ShipDate_nsprefix_ = None + self.EntryZipCode = _cast(int, EntryZipCode) + self.EntryZipCode_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANFormNumberType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANFormNumberType.subclass: + return SCANFormNumberType.subclass(*args_, **kwargs_) + else: + return SCANFormNumberType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_EntryZipCode(self): + return self.EntryZipCode + def set_EntryZipCode(self, EntryZipCode): + self.EntryZipCode = EntryZipCode + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return 
False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormNumberType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANFormNumberType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SCANFormNumberType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANFormNumberType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANFormNumberType'): + if self.ShipDate is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + outfile.write(' ShipDate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ShipDate), input_name='ShipDate')), )) + if self.EntryZipCode is not None and 'EntryZipCode' not in already_processed: + already_processed.add('EntryZipCode') + outfile.write(' EntryZipCode="%s"' % self.gds_format_integer(self.EntryZipCode, input_name='EntryZipCode')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormNumberType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ShipDate', node) + if value is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + self.ShipDate = value + value = find_attr_value_('EntryZipCode', node) + if value is not None and 'EntryZipCode' not in already_processed: + already_processed.add('EntryZipCode') + self.EntryZipCode = self.gds_parse_integer(value, node, 'EntryZipCode') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class SCANFormNumberType + + +class SCANFormImageType(GeneratedsSuper): + """SCANFormImageType -- 65255 skipped + + """ + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ShipDate=None, EntryZipCode=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ShipDate = _cast(None, ShipDate) + self.ShipDate_nsprefix_ = None + self.EntryZipCode = _cast(int, EntryZipCode) + self.EntryZipCode_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ 
is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SCANFormImageType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SCANFormImageType.subclass: + return SCANFormImageType.subclass(*args_, **kwargs_) + else: + return SCANFormImageType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ShipDate(self): + return self.ShipDate + def set_ShipDate(self, ShipDate): + self.ShipDate = ShipDate + def get_EntryZipCode(self): + return self.EntryZipCode + def set_EntryZipCode(self, EntryZipCode): + self.EntryZipCode = EntryZipCode + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormImageType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SCANFormImageType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SCANFormImageType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SCANFormImageType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SCANFormImageType'): + if self.ShipDate is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + outfile.write(' ShipDate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ShipDate), input_name='ShipDate')), )) + if self.EntryZipCode is not None and 'EntryZipCode' not in already_processed: + already_processed.add('EntryZipCode') + outfile.write(' EntryZipCode="%s"' % self.gds_format_integer(self.EntryZipCode, input_name='EntryZipCode')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SCANFormImageType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ShipDate', node) + if value is not None and 'ShipDate' not in already_processed: + already_processed.add('ShipDate') + self.ShipDate = value + value = find_attr_value_('EntryZipCode', node) + if value is not None and 'EntryZipCode' not in already_processed: + 
already_processed.add('EntryZipCode') + self.EntryZipCode = self.gds_parse_integer(value, node, 'EntryZipCode') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class SCANFormImageType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANResponse' + rootClass = SCANResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SCANResponse' + rootClass = SCANResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        content = etree_.tostring(
+            rootElement, pretty_print=True,
+            xml_declaration=True, encoding="utf-8")
+        sys.stdout.write(str(content))
+        sys.stdout.write('\n')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj, rootElement, mapping, reverse_node_mapping
+
+
+def parseString(inString, silence=False, print_warnings=True):
+    '''Parse a string, create the object tree, and export it.
+
+    Arguments:
+    - inString -- A string. This XML fragment should not start
+      with an XML declaration containing an encoding.
+    - silence -- A boolean. If False, export the object.
+    Returns -- The root object in the tree.
+    '''
+    parser = None
+    rootNode= parsexmlstring_(inString, parser)
+    gds_collector = GdsCollector_()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SCANResponse'
+        rootClass = SCANResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    if not SaveElementTreeNode:
+        rootNode = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(
+            sys.stdout, 0, name_=rootTag,
+            namespacedef_='')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def parseLiteral(inFileName, silence=False, print_warnings=True):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    gds_collector = GdsCollector_()
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SCANResponse'
+        rootClass = SCANResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    # Enable Python to collect the space used by the DOM.
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        sys.stdout.write('#from scan_response import *\n\n')
+        sys.stdout.write('import scan_response as model_\n\n')
+        sys.stdout.write('rootObj = model_.rootClass(\n')
+        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
+        sys.stdout.write(')\n')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def main():
+    args = sys.argv[1:]
+    if len(args) == 1:
+        parse(args[0])
+    else:
+        usage()
+
+
+if __name__ == '__main__':
+    #import pdb; pdb.set_trace()
+    main()
+
+RenameMappings_ = {
+}
+
+#
+# Mapping of namespaces to types defined in them
+# and the file in which each is defined.
+# simpleTypes are marked "ST" and complexTypes "CT".
+NamespaceToDefMappings_ = {} + +__all__ = [ + "SCANFormImageType", + "SCANFormNumberType", + "SCANResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_request.py new file mode 100644 index 0000000000..4ea74b8d5c --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_request.py @@ -0,0 +1,1437 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:08 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/sdc_get_locations_request.py') +# +# Command line arguments: +# ./schemas/SDCGetLocationsRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/sdc_get_locations_request.py" ./schemas/SDCGetLocationsRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. 
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SDCGetLocationsRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, MailClass=None, OriginZIP=None, DestinationZIP=None, AcceptDate=None, AcceptTime=None, NonExpeditedDetail=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.OriginZIP = OriginZIP + self.OriginZIP_nsprefix_ = None + self.DestinationZIP = DestinationZIP + self.DestinationZIP_nsprefix_ = None + self.AcceptDate = AcceptDate + self.AcceptDate_nsprefix_ = None + self.AcceptTime = AcceptTime + self.AcceptTime_nsprefix_ = None + self.NonExpeditedDetail = NonExpeditedDetail + self.NonExpeditedDetail_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SDCGetLocationsRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SDCGetLocationsRequest.subclass: + return SDCGetLocationsRequest.subclass(*args_, **kwargs_) + else: + return SDCGetLocationsRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_OriginZIP(self): + return self.OriginZIP + def set_OriginZIP(self, OriginZIP): + self.OriginZIP = OriginZIP + def get_DestinationZIP(self): + return self.DestinationZIP + def set_DestinationZIP(self, DestinationZIP): + self.DestinationZIP = DestinationZIP + def get_AcceptDate(self): + return self.AcceptDate + def set_AcceptDate(self, AcceptDate): + self.AcceptDate = AcceptDate + def get_AcceptTime(self): + return self.AcceptTime + def set_AcceptTime(self, AcceptTime): + self.AcceptTime = AcceptTime + def get_NonExpeditedDetail(self): + return self.NonExpeditedDetail + def set_NonExpeditedDetail(self, NonExpeditedDetail): + self.NonExpeditedDetail = NonExpeditedDetail + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def 
get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.MailClass is not None or + self.OriginZIP is not None or + self.DestinationZIP is not None or + self.AcceptDate is not None or + self.AcceptTime is not None or + self.NonExpeditedDetail is not None or + self.ClientType is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SDCGetLocationsRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SDCGetLocationsRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SDCGetLocationsRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SDCGetLocationsRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SDCGetLocationsRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.OriginZIP is not None: + namespaceprefix_ = self.OriginZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZIP, input_name='OriginZIP'), namespaceprefix_ , eol_)) + if self.DestinationZIP is not None: + namespaceprefix_ = self.DestinationZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZIP, input_name='DestinationZIP'), namespaceprefix_ , eol_)) + if self.AcceptDate is not None: + namespaceprefix_ = 
self.AcceptDate_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AcceptDate), input_name='AcceptDate')), namespaceprefix_ , eol_)) + if self.AcceptTime is not None: + namespaceprefix_ = self.AcceptTime_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.AcceptTime, input_name='AcceptTime'), namespaceprefix_ , eol_)) + if self.NonExpeditedDetail is not None: + namespaceprefix_ = self.NonExpeditedDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedDetail_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonExpeditedDetail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NonExpeditedDetail), input_name='NonExpeditedDetail')), namespaceprefix_ , eol_)) + if self.ClientType is not None: + namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZIP') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZIP') + self.OriginZIP = ival_ + self.OriginZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZIP') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZIP') + self.DestinationZIP = ival_ + self.DestinationZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AcceptDate') + value_ = self.gds_validate_string(value_, node, 'AcceptDate') + self.AcceptDate = value_ + self.AcceptDate_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'AcceptTime') + ival_ = 
self.gds_validate_integer(ival_, node, 'AcceptTime') + self.AcceptTime = ival_ + self.AcceptTime_nsprefix_ = child_.prefix + elif nodeName_ == 'NonExpeditedDetail': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'NonExpeditedDetail') + value_ = self.gds_validate_string(value_, node, 'NonExpeditedDetail') + self.NonExpeditedDetail = value_ + self.NonExpeditedDetail_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class SDCGetLocationsRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SDCGetLocationsRequest' + rootClass = SDCGetLocationsRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from sdc_get_locations_request import *\n\n') + sys.stdout.write('import sdc_get_locations_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = {} + +__all__ = [ + "SDCGetLocationsRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_response.py new file mode 100644 index 0000000000..866c4edabe --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/sdc_get_locations_response.py @@ -0,0 +1,3217 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:08 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/sdc_get_locations_response.py') +# +# Command line arguments: +# ./schemas/SDCGetLocationsResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/sdc_get_locations_response.py" ./schemas/SDCGetLocationsResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." 
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class SDCGetLocationsResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Release=None, MailClass=None, OriginZIP=None, OriginCity=None, OriginState=None, DestZIP=None, DestCity=None, DestState=None, AcceptDate=None, AcceptTime=None, Expedited=None, NonExpedited=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.Release = Release + self.Release_nsprefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.OriginZIP = OriginZIP + self.OriginZIP_nsprefix_ = None + self.OriginCity = OriginCity + self.OriginCity_nsprefix_ = None + self.OriginState = OriginState + self.OriginState_nsprefix_ = None + self.DestZIP = DestZIP + self.DestZIP_nsprefix_ = None + self.DestCity = DestCity + self.DestCity_nsprefix_ = None + self.DestState = DestState + self.DestState_nsprefix_ = None + if isinstance(AcceptDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(AcceptDate, '%Y-%m-%d').date() + else: + initvalue_ = AcceptDate + self.AcceptDate = initvalue_ + self.AcceptDate_nsprefix_ = None + self.AcceptTime = AcceptTime + self.AcceptTime_nsprefix_ = None + self.Expedited = Expedited + self.Expedited_nsprefix_ = None + if NonExpedited is None: + self.NonExpedited = [] + else: + self.NonExpedited = NonExpedited + self.NonExpedited_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SDCGetLocationsResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SDCGetLocationsResponse.subclass: + return SDCGetLocationsResponse.subclass(*args_, **kwargs_) + else: + return SDCGetLocationsResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Release(self): + return self.Release + def set_Release(self, Release): + self.Release = Release + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_OriginZIP(self): + return self.OriginZIP + def set_OriginZIP(self, OriginZIP): + self.OriginZIP = OriginZIP + def get_OriginCity(self): + return self.OriginCity + def set_OriginCity(self, OriginCity): + self.OriginCity = OriginCity + def get_OriginState(self): + return self.OriginState + def set_OriginState(self, 
OriginState): + self.OriginState = OriginState + def get_DestZIP(self): + return self.DestZIP + def set_DestZIP(self, DestZIP): + self.DestZIP = DestZIP + def get_DestCity(self): + return self.DestCity + def set_DestCity(self, DestCity): + self.DestCity = DestCity + def get_DestState(self): + return self.DestState + def set_DestState(self, DestState): + self.DestState = DestState + def get_AcceptDate(self): + return self.AcceptDate + def set_AcceptDate(self, AcceptDate): + self.AcceptDate = AcceptDate + def get_AcceptTime(self): + return self.AcceptTime + def set_AcceptTime(self, AcceptTime): + self.AcceptTime = AcceptTime + def get_Expedited(self): + return self.Expedited + def set_Expedited(self, Expedited): + self.Expedited = Expedited + def get_NonExpedited(self): + return self.NonExpedited + def set_NonExpedited(self, NonExpedited): + self.NonExpedited = NonExpedited + def add_NonExpedited(self, value): + self.NonExpedited.append(value) + def insert_NonExpedited_at(self, index, value): + self.NonExpedited.insert(index, value) + def replace_NonExpedited_at(self, index, value): + self.NonExpedited[index] = value + def has__content(self): + if ( + self.Release is not None or + self.MailClass is not None or + self.OriginZIP is not None or + self.OriginCity is not None or + self.OriginState is not None or + self.DestZIP is not None or + self.DestCity is not None or + self.DestState is not None or + self.AcceptDate is not None or + self.AcceptTime is not None or + self.Expedited is not None or + self.NonExpedited + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SDCGetLocationsResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SDCGetLocationsResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SDCGetLocationsResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SDCGetLocationsResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SDCGetLocationsResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SDCGetLocationsResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Release is not None: + namespaceprefix_ = self.Release_nsprefix_ + ':' if (UseCapturedNS_ and self.Release_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRelease>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Release, input_name='Release'), namespaceprefix_ , eol_)) + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, 
level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.OriginZIP is not None: + namespaceprefix_ = self.OriginZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZIP, input_name='OriginZIP'), namespaceprefix_ , eol_)) + if self.OriginCity is not None: + namespaceprefix_ = self.OriginCity_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginCity), input_name='OriginCity')), namespaceprefix_ , eol_)) + if self.OriginState is not None: + namespaceprefix_ = self.OriginState_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginState), input_name='OriginState')), namespaceprefix_ , eol_)) + if self.DestZIP is not None: + namespaceprefix_ = self.DestZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.DestZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestZIP, input_name='DestZIP'), namespaceprefix_ , eol_)) + if self.DestCity is not None: + namespaceprefix_ = self.DestCity_nsprefix_ + ':' if (UseCapturedNS_ and self.DestCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestCity), input_name='DestCity')), namespaceprefix_ , eol_)) + if self.DestState is not None: + namespaceprefix_ = self.DestState_nsprefix_ + ':' if (UseCapturedNS_ and self.DestState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestState), input_name='DestState')), namespaceprefix_ , eol_)) + if self.AcceptDate is not None: + namespaceprefix_ = self.AcceptDate_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.AcceptDate, input_name='AcceptDate'), namespaceprefix_ , eol_)) + if self.AcceptTime is not None: + namespaceprefix_ = self.AcceptTime_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAcceptTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.AcceptTime, input_name='AcceptTime'), namespaceprefix_ , eol_)) + if self.Expedited is not None: + namespaceprefix_ = self.Expedited_nsprefix_ + ':' if (UseCapturedNS_ and self.Expedited_nsprefix_) else '' + self.Expedited.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Expedited', pretty_print=pretty_print) + for NonExpedited_ in self.NonExpedited: + namespaceprefix_ = self.NonExpedited_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpedited_nsprefix_) else '' + NonExpedited_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonExpedited', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + 
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Release' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Release') + ival_ = self.gds_validate_integer(ival_, node, 'Release') + self.Release = ival_ + self.Release_nsprefix_ = child_.prefix + elif nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZIP') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZIP') + self.OriginZIP = ival_ + self.OriginZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCity') + value_ = self.gds_validate_string(value_, node, 'OriginCity') + self.OriginCity = value_ + self.OriginCity_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginState') + value_ = self.gds_validate_string(value_, node, 'OriginState') + self.OriginState = value_ + self.OriginState_nsprefix_ = child_.prefix + elif nodeName_ == 'DestZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestZIP') + ival_ = self.gds_validate_integer(ival_, node, 'DestZIP') + self.DestZIP = ival_ + self.DestZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'DestCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestCity') + value_ = self.gds_validate_string(value_, node, 'DestCity') + self.DestCity = value_ + self.DestCity_nsprefix_ = child_.prefix + elif nodeName_ == 'DestState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestState') + value_ = self.gds_validate_string(value_, node, 'DestState') + self.DestState = value_ + self.DestState_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.AcceptDate = dval_ + self.AcceptDate_nsprefix_ = child_.prefix + elif nodeName_ == 'AcceptTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'AcceptTime') + ival_ = self.gds_validate_integer(ival_, node, 'AcceptTime') + self.AcceptTime = ival_ + self.AcceptTime_nsprefix_ = child_.prefix + elif nodeName_ == 'Expedited': + obj_ = ExpeditedType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Expedited = obj_ + obj_.original_tagname_ = 'Expedited' + elif nodeName_ == 'NonExpedited': + obj_ = NonExpeditedType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.NonExpedited.append(obj_) + obj_.original_tagname_ = 'NonExpedited' +# end class SDCGetLocationsResponse + + +class ExpeditedType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = 
None + superclass = None + def __init__(self, EAD=None, Commitment=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(EAD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EAD, '%Y-%m-%d').date() + else: + initvalue_ = EAD + self.EAD = initvalue_ + self.EAD_nsprefix_ = None + if Commitment is None: + self.Commitment = [] + else: + self.Commitment = Commitment + self.Commitment_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ExpeditedType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ExpeditedType.subclass: + return ExpeditedType.subclass(*args_, **kwargs_) + else: + return ExpeditedType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EAD(self): + return self.EAD + def set_EAD(self, EAD): + self.EAD = EAD + def get_Commitment(self): + return self.Commitment + def set_Commitment(self, Commitment): + self.Commitment = Commitment + def add_Commitment(self, value): + self.Commitment.append(value) + def insert_Commitment_at(self, index, value): + self.Commitment.insert(index, value) + def replace_Commitment_at(self, index, value): + self.Commitment[index] = value + def has__content(self): + if ( + self.EAD is not None or + self.Commitment + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpeditedType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpeditedType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ExpeditedType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpeditedType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpeditedType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpeditedType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpeditedType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.EAD is not None: + namespaceprefix_ = self.EAD_nsprefix_ + ':' if (UseCapturedNS_ and self.EAD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEAD>%s%s' % (namespaceprefix_ , self.gds_format_date(self.EAD, input_name='EAD'), namespaceprefix_ , eol_)) + for Commitment_ in self.Commitment: + namespaceprefix_ = self.Commitment_nsprefix_ + ':' if (UseCapturedNS_ and self.Commitment_nsprefix_) else '' + Commitment_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='Commitment', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'EAD': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.EAD = dval_ + self.EAD_nsprefix_ = child_.prefix + elif nodeName_ == 'Commitment': + obj_ = CommitmentType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Commitment.append(obj_) + obj_.original_tagname_ = 'Commitment' +# end class ExpeditedType + + +class CommitmentType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, MailClass=None, CommitmentName=None, CommitmentTime=None, CommitmentSeq=None, Location=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.CommitmentName = CommitmentName + self.CommitmentName_nsprefix_ = None + self.CommitmentTime = CommitmentTime + self.CommitmentTime_nsprefix_ = None + self.CommitmentSeq = CommitmentSeq + self.CommitmentSeq_nsprefix_ = None + if Location is None: + self.Location = [] + else: + self.Location = Location + self.Location_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CommitmentType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CommitmentType.subclass: + return CommitmentType.subclass(*args_, **kwargs_) + else: + return CommitmentType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_CommitmentName(self): + return self.CommitmentName + def set_CommitmentName(self, CommitmentName): + self.CommitmentName = CommitmentName + def get_CommitmentTime(self): + return self.CommitmentTime + def set_CommitmentTime(self, CommitmentTime): + self.CommitmentTime = CommitmentTime + def get_CommitmentSeq(self): + return self.CommitmentSeq + def set_CommitmentSeq(self, CommitmentSeq): + self.CommitmentSeq = CommitmentSeq + def get_Location(self): + return self.Location + def set_Location(self, Location): + self.Location = Location + def add_Location(self, value): + self.Location.append(value) + def insert_Location_at(self, index, value): + self.Location.insert(index, value) + def replace_Location_at(self, index, value): + self.Location[index] = value + def has__content(self): + if ( + self.MailClass is not None or + self.CommitmentName is not None or + self.CommitmentTime is not None or + self.CommitmentSeq is not None or + self.Location + ): + return True + else: + return False + 
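# Usage sketch (illustrative comment only, not part of the generateDS output; xml_bytes is a hypothetical raw XML payload): + # a generated type such as CommitmentType is populated from a parsed lxml element via build() and + # re-serialized with export(), e.g.: + #     obj = CommitmentType.factory() + #     obj.build(etree_.fromstring(xml_bytes)) + #     obj.export(sys.stdout, 0, name_='Commitment') +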
def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CommitmentType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CommitmentType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CommitmentType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CommitmentType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CommitmentType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CommitmentType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.CommitmentName is not None: + namespaceprefix_ = self.CommitmentName_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentName), input_name='CommitmentName')), namespaceprefix_ , eol_)) + if self.CommitmentTime is not None: + namespaceprefix_ = self.CommitmentTime_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentTime>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CommitmentTime, input_name='CommitmentTime'), namespaceprefix_ , eol_)) + if self.CommitmentSeq is not None: + namespaceprefix_ = self.CommitmentSeq_nsprefix_ + ':' if (UseCapturedNS_ and self.CommitmentSeq_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCommitmentSeq>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CommitmentSeq), input_name='CommitmentSeq')), namespaceprefix_ , eol_)) + for Location_ in self.Location: + namespaceprefix_ = self.Location_nsprefix_ + ':' if (UseCapturedNS_ and self.Location_nsprefix_) else '' + Location_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Location', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, 
gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentName') + value_ = self.gds_validate_string(value_, node, 'CommitmentName') + self.CommitmentName = value_ + self.CommitmentName_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentTime' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'CommitmentTime') + ival_ = self.gds_validate_integer(ival_, node, 'CommitmentTime') + self.CommitmentTime = ival_ + self.CommitmentTime_nsprefix_ = child_.prefix + elif nodeName_ == 'CommitmentSeq': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'CommitmentSeq') + value_ = self.gds_validate_string(value_, node, 'CommitmentSeq') + self.CommitmentSeq = value_ + self.CommitmentSeq_nsprefix_ = child_.prefix + elif nodeName_ == 'Location': + obj_ = LocationType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Location.append(obj_) + obj_.original_tagname_ = 'Location' +# end class CommitmentType + + +class LocationType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SDD=None, COT=None, FacType=None, Street=None, City=None, State=None, ZIP=None, IsGuaranteed=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(SDD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(SDD, '%Y-%m-%d').date() + else: + initvalue_ = SDD + self.SDD = initvalue_ + self.SDD_nsprefix_ = None + self.COT = COT + self.COT_nsprefix_ = None + self.FacType = FacType + self.FacType_nsprefix_ = None + self.Street = Street + self.Street_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.ZIP = ZIP + self.ZIP_nsprefix_ = None + self.IsGuaranteed = IsGuaranteed + self.IsGuaranteed_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, LocationType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LocationType.subclass: + return LocationType.subclass(*args_, **kwargs_) + else: + return LocationType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SDD(self): + return self.SDD + def set_SDD(self, SDD): + self.SDD = SDD + def get_COT(self): + return self.COT + def set_COT(self, COT): + self.COT = COT + def get_FacType(self): + return self.FacType + def set_FacType(self, FacType): + self.FacType = FacType + def get_Street(self): + return self.Street + def set_Street(self, Street): + self.Street = Street + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def 
set_State(self, State): + self.State = State + def get_ZIP(self): + return self.ZIP + def set_ZIP(self, ZIP): + self.ZIP = ZIP + def get_IsGuaranteed(self): + return self.IsGuaranteed + def set_IsGuaranteed(self, IsGuaranteed): + self.IsGuaranteed = IsGuaranteed + def has__content(self): + if ( + self.SDD is not None or + self.COT is not None or + self.FacType is not None or + self.Street is not None or + self.City is not None or + self.State is not None or + self.ZIP is not None or + self.IsGuaranteed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'LocationType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SDD is not None: + namespaceprefix_ = self.SDD_nsprefix_ + ':' if (UseCapturedNS_ and self.SDD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSDD>%s%s' % (namespaceprefix_ , self.gds_format_date(self.SDD, input_name='SDD'), namespaceprefix_ , eol_)) + if self.COT is not None: + namespaceprefix_ = self.COT_nsprefix_ + ':' if (UseCapturedNS_ and self.COT_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCOT>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.COT, input_name='COT'), namespaceprefix_ , eol_)) + if self.FacType is not None: + namespaceprefix_ = self.FacType_nsprefix_ + ':' if (UseCapturedNS_ and self.FacType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFacType>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FacType), input_name='FacType')), namespaceprefix_ , eol_)) + if self.Street is not None: + namespaceprefix_ = self.Street_nsprefix_ + ':' if (UseCapturedNS_ and self.Street_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStreet>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Street), input_name='Street')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), 
input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.ZIP is not None: + namespaceprefix_ = self.ZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ZIP, input_name='ZIP'), namespaceprefix_ , eol_)) + if self.IsGuaranteed is not None: + namespaceprefix_ = self.IsGuaranteed_nsprefix_ + ':' if (UseCapturedNS_ and self.IsGuaranteed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sIsGuaranteed>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.IsGuaranteed, input_name='IsGuaranteed'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SDD': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.SDD = dval_ + self.SDD_nsprefix_ = child_.prefix + elif nodeName_ == 'COT' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'COT') + ival_ = self.gds_validate_integer(ival_, node, 'COT') + self.COT = ival_ + self.COT_nsprefix_ = child_.prefix + elif nodeName_ == 'FacType': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FacType') + value_ = self.gds_validate_string(value_, node, 'FacType') + self.FacType = value_ + self.FacType_nsprefix_ = child_.prefix + elif nodeName_ == 'Street': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Street') + value_ = self.gds_validate_string(value_, node, 'Street') + self.Street = value_ + self.Street_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ZIP') + ival_ = self.gds_validate_integer(ival_, node, 'ZIP') + self.ZIP = ival_ + self.ZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'IsGuaranteed' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'IsGuaranteed') + ival_ = self.gds_validate_integer(ival_, node, 'IsGuaranteed') + self.IsGuaranteed = ival_ + self.IsGuaranteed_nsprefix_ = child_.prefix +# end class LocationType + + +class NonExpeditedType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass 
= None + superclass = None + def __init__(self, MailClass=None, NonExpeditedDestType=None, EAD=None, COT=None, SvcStdMsg=None, SvcStdDays=None, TotDaysDeliver=None, SchedDlvryDate=None, NonDlvryDays=None, NonExpeditedExceptions=None, HFPU=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.MailClass = MailClass + self.MailClass_nsprefix_ = None + self.NonExpeditedDestType = NonExpeditedDestType + self.NonExpeditedDestType_nsprefix_ = None + if isinstance(EAD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EAD, '%Y-%m-%d').date() + else: + initvalue_ = EAD + self.EAD = initvalue_ + self.EAD_nsprefix_ = None + self.COT = COT + self.COT_nsprefix_ = None + self.SvcStdMsg = SvcStdMsg + self.SvcStdMsg_nsprefix_ = None + self.SvcStdDays = SvcStdDays + self.SvcStdDays_nsprefix_ = None + self.TotDaysDeliver = TotDaysDeliver + self.TotDaysDeliver_nsprefix_ = None + if isinstance(SchedDlvryDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(SchedDlvryDate, '%Y-%m-%d').date() + else: + initvalue_ = SchedDlvryDate + self.SchedDlvryDate = initvalue_ + self.SchedDlvryDate_nsprefix_ = None + self.NonDlvryDays = NonDlvryDays + self.NonDlvryDays_nsprefix_ = None + self.NonExpeditedExceptions = NonExpeditedExceptions + self.NonExpeditedExceptions_nsprefix_ = None + self.HFPU = HFPU + self.HFPU_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, NonExpeditedType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if NonExpeditedType.subclass: + return NonExpeditedType.subclass(*args_, **kwargs_) + else: + return NonExpeditedType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_MailClass(self): + return self.MailClass + def set_MailClass(self, MailClass): + self.MailClass = MailClass + def get_NonExpeditedDestType(self): + return self.NonExpeditedDestType + def set_NonExpeditedDestType(self, NonExpeditedDestType): + self.NonExpeditedDestType = NonExpeditedDestType + def get_EAD(self): + return self.EAD + def set_EAD(self, EAD): + self.EAD = EAD + def get_COT(self): + return self.COT + def set_COT(self, COT): + self.COT = COT + def get_SvcStdMsg(self): + return self.SvcStdMsg + def set_SvcStdMsg(self, SvcStdMsg): + self.SvcStdMsg = SvcStdMsg + def get_SvcStdDays(self): + return self.SvcStdDays + def set_SvcStdDays(self, SvcStdDays): + self.SvcStdDays = SvcStdDays + def get_TotDaysDeliver(self): + return self.TotDaysDeliver + def set_TotDaysDeliver(self, TotDaysDeliver): + self.TotDaysDeliver = TotDaysDeliver + def get_SchedDlvryDate(self): + return self.SchedDlvryDate + def set_SchedDlvryDate(self, SchedDlvryDate): + self.SchedDlvryDate = SchedDlvryDate + def get_NonDlvryDays(self): + return self.NonDlvryDays + def set_NonDlvryDays(self, NonDlvryDays): + self.NonDlvryDays = NonDlvryDays + def get_NonExpeditedExceptions(self): + return self.NonExpeditedExceptions + def set_NonExpeditedExceptions(self, NonExpeditedExceptions): + self.NonExpeditedExceptions = NonExpeditedExceptions + def get_HFPU(self): + return self.HFPU + def set_HFPU(self, HFPU): + self.HFPU = HFPU + def has__content(self): + if ( + self.MailClass is not None or + self.NonExpeditedDestType is not 
None or + self.EAD is not None or + self.COT is not None or + self.SvcStdMsg is not None or + self.SvcStdDays is not None or + self.TotDaysDeliver is not None or + self.SchedDlvryDate is not None or + self.NonDlvryDays is not None or + self.NonExpeditedExceptions is not None or + self.HFPU is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonExpeditedType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'NonExpeditedType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonExpeditedType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NonExpeditedType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonExpeditedType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.MailClass is not None: + namespaceprefix_ = self.MailClass_nsprefix_ + ':' if (UseCapturedNS_ and self.MailClass_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailClass>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MailClass, input_name='MailClass'), namespaceprefix_ , eol_)) + if self.NonExpeditedDestType is not None: + namespaceprefix_ = self.NonExpeditedDestType_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedDestType_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonExpeditedDestType>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NonExpeditedDestType, input_name='NonExpeditedDestType'), namespaceprefix_ , eol_)) + if self.EAD is not None: + namespaceprefix_ = self.EAD_nsprefix_ + ':' if (UseCapturedNS_ and self.EAD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEAD>%s%s' % (namespaceprefix_ , self.gds_format_date(self.EAD, input_name='EAD'), namespaceprefix_ , eol_)) + if self.COT is not None: + namespaceprefix_ = self.COT_nsprefix_ + ':' if (UseCapturedNS_ and self.COT_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCOT>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.COT, input_name='COT'), namespaceprefix_ , eol_)) + if self.SvcStdMsg is not None: + namespaceprefix_ = self.SvcStdMsg_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcStdMsg_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdMsg>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcStdMsg), input_name='SvcStdMsg')), namespaceprefix_ , eol_)) + if self.SvcStdDays is not None: + namespaceprefix_ = self.SvcStdDays_nsprefix_ + ':' 
if (UseCapturedNS_ and self.SvcStdDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SvcStdDays, input_name='SvcStdDays'), namespaceprefix_ , eol_)) + if self.TotDaysDeliver is not None: + namespaceprefix_ = self.TotDaysDeliver_nsprefix_ + ':' if (UseCapturedNS_ and self.TotDaysDeliver_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotDaysDeliver>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TotDaysDeliver, input_name='TotDaysDeliver'), namespaceprefix_ , eol_)) + if self.SchedDlvryDate is not None: + namespaceprefix_ = self.SchedDlvryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.SchedDlvryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSchedDlvryDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.SchedDlvryDate, input_name='SchedDlvryDate'), namespaceprefix_ , eol_)) + if self.NonDlvryDays is not None: + namespaceprefix_ = self.NonDlvryDays_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDlvryDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDlvryDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NonDlvryDays, input_name='NonDlvryDays'), namespaceprefix_ , eol_)) + if self.NonExpeditedExceptions is not None: + namespaceprefix_ = self.NonExpeditedExceptions_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedExceptions_nsprefix_) else '' + self.NonExpeditedExceptions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonExpeditedExceptions', pretty_print=pretty_print) + if self.HFPU is not None: + namespaceprefix_ = self.HFPU_nsprefix_ + ':' if (UseCapturedNS_ and self.HFPU_nsprefix_) else '' + self.HFPU.export(outfile, level, namespaceprefix_, namespacedef_='', name_='HFPU', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'MailClass' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MailClass') + ival_ = self.gds_validate_integer(ival_, node, 'MailClass') + self.MailClass = ival_ + self.MailClass_nsprefix_ = child_.prefix + elif nodeName_ == 'NonExpeditedDestType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'NonExpeditedDestType') + ival_ = self.gds_validate_integer(ival_, node, 'NonExpeditedDestType') + self.NonExpeditedDestType = ival_ + self.NonExpeditedDestType_nsprefix_ = child_.prefix + elif nodeName_ == 'EAD': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.EAD = dval_ + self.EAD_nsprefix_ = child_.prefix + elif nodeName_ == 'COT' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'COT') + ival_ = self.gds_validate_integer(ival_, node, 'COT') + self.COT = ival_ + self.COT_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcStdMsg': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
'SvcStdMsg') + value_ = self.gds_validate_string(value_, node, 'SvcStdMsg') + self.SvcStdMsg = value_ + self.SvcStdMsg_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcStdDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'SvcStdDays') + ival_ = self.gds_validate_integer(ival_, node, 'SvcStdDays') + self.SvcStdDays = ival_ + self.SvcStdDays_nsprefix_ = child_.prefix + elif nodeName_ == 'TotDaysDeliver' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'TotDaysDeliver') + ival_ = self.gds_validate_integer(ival_, node, 'TotDaysDeliver') + self.TotDaysDeliver = ival_ + self.TotDaysDeliver_nsprefix_ = child_.prefix + elif nodeName_ == 'SchedDlvryDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.SchedDlvryDate = dval_ + self.SchedDlvryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDlvryDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'NonDlvryDays') + ival_ = self.gds_validate_integer(ival_, node, 'NonDlvryDays') + self.NonDlvryDays = ival_ + self.NonDlvryDays_nsprefix_ = child_.prefix + elif nodeName_ == 'NonExpeditedExceptions': + obj_ = NonExpeditedExceptionsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.NonExpeditedExceptions = obj_ + obj_.original_tagname_ = 'NonExpeditedExceptions' + elif nodeName_ == 'HFPU': + obj_ = HFPUType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.HFPU = obj_ + obj_.original_tagname_ = 'HFPU' +# end class NonExpeditedType + + +class NonExpeditedExceptionsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SunHol=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SunHol = SunHol + self.SunHol_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, NonExpeditedExceptionsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if NonExpeditedExceptionsType.subclass: + return NonExpeditedExceptionsType.subclass(*args_, **kwargs_) + else: + return NonExpeditedExceptionsType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SunHol(self): + return self.SunHol + def set_SunHol(self, SunHol): + self.SunHol = SunHol + def has__content(self): + if ( + self.SunHol is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonExpeditedExceptionsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'NonExpeditedExceptionsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='NonExpeditedExceptionsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NonExpeditedExceptionsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonExpeditedExceptionsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SunHol is not None: + namespaceprefix_ = self.SunHol_nsprefix_ + ':' if (UseCapturedNS_ and self.SunHol_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSunHol>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SunHol, input_name='SunHol'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SunHol' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'SunHol') + ival_ = self.gds_validate_integer(ival_, node, 'SunHol') + self.SunHol = ival_ + self.SunHol_nsprefix_ = child_.prefix +# end class NonExpeditedExceptionsType + + +class HFPUType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, EAD=None, COT=None, ServiceStandard=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if isinstance(EAD, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(EAD, '%Y-%m-%d').date() + else: + initvalue_ = EAD + self.EAD = initvalue_ + self.EAD_nsprefix_ = None + self.COT = COT + self.COT_nsprefix_ = None + self.ServiceStandard = ServiceStandard + self.ServiceStandard_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, HFPUType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if HFPUType.subclass: + return HFPUType.subclass(*args_, **kwargs_) + else: + return HFPUType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EAD(self): + return self.EAD + def set_EAD(self, EAD): + self.EAD = EAD + def get_COT(self): + return self.COT + def set_COT(self, COT): + self.COT = COT + def get_ServiceStandard(self): + return self.ServiceStandard + def set_ServiceStandard(self, ServiceStandard): + self.ServiceStandard = ServiceStandard + def has__content(self): + if ( + self.EAD is not None or + self.COT is not None or + 
self.ServiceStandard is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPUType', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('HFPUType')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'HFPUType':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HFPUType')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HFPUType', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HFPUType'):
+        pass
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HFPUType', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.EAD is not None:
+            namespaceprefix_ = self.EAD_nsprefix_ + ':' if (UseCapturedNS_ and self.EAD_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEAD>%s</%sEAD>%s' % (namespaceprefix_ , self.gds_format_date(self.EAD, input_name='EAD'), namespaceprefix_ , eol_))
+        if self.COT is not None:
+            namespaceprefix_ = self.COT_nsprefix_ + ':' if (UseCapturedNS_ and self.COT_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCOT>%s</%sCOT>%s' % (namespaceprefix_ , self.gds_format_integer(self.COT, input_name='COT'), namespaceprefix_ , eol_))
+        if self.ServiceStandard is not None:
+            namespaceprefix_ = self.ServiceStandard_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceStandard_nsprefix_) else ''
+            self.ServiceStandard.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ServiceStandard', pretty_print=pretty_print)
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'EAD':
+            sval_ = child_.text
+            dval_ = self.gds_parse_date(sval_)
+            self.EAD = dval_
+            self.EAD_nsprefix_ = child_.prefix
+        elif nodeName_ == 'COT' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'COT')
+            ival_ = self.gds_validate_integer(ival_, node, 'COT')
+            self.COT = ival_
+            self.COT_nsprefix_ = child_.prefix
+        elif nodeName_ == 'ServiceStandard':
+            obj_ = ServiceStandardType.factory(parent_object_=self)
+            obj_.build(child_, gds_collector_=gds_collector_)
+            self.ServiceStandard = obj_
+            obj_.original_tagname_ = 'ServiceStandard'
+# end class
HFPUType + + +class ServiceStandardType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SvcStdMsg=None, SvcStdDays=None, Location=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SvcStdMsg = SvcStdMsg + self.SvcStdMsg_nsprefix_ = None + self.SvcStdDays = SvcStdDays + self.SvcStdDays_nsprefix_ = None + self.Location = Location + self.Location_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ServiceStandardType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ServiceStandardType.subclass: + return ServiceStandardType.subclass(*args_, **kwargs_) + else: + return ServiceStandardType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SvcStdMsg(self): + return self.SvcStdMsg + def set_SvcStdMsg(self, SvcStdMsg): + self.SvcStdMsg = SvcStdMsg + def get_SvcStdDays(self): + return self.SvcStdDays + def set_SvcStdDays(self, SvcStdDays): + self.SvcStdDays = SvcStdDays + def get_Location(self): + return self.Location + def set_Location(self, Location): + self.Location = Location + def has__content(self): + if ( + self.SvcStdMsg is not None or + self.SvcStdDays is not None or + self.Location is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceStandardType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ServiceStandardType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ServiceStandardType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ServiceStandardType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ServiceStandardType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ServiceStandardType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ServiceStandardType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SvcStdMsg is not None: + namespaceprefix_ = self.SvcStdMsg_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcStdMsg_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdMsg>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SvcStdMsg), input_name='SvcStdMsg')), namespaceprefix_ , eol_)) + if self.SvcStdDays is not None: + namespaceprefix_ = 
self.SvcStdDays_nsprefix_ + ':' if (UseCapturedNS_ and self.SvcStdDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSvcStdDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SvcStdDays, input_name='SvcStdDays'), namespaceprefix_ , eol_)) + if self.Location is not None: + namespaceprefix_ = self.Location_nsprefix_ + ':' if (UseCapturedNS_ and self.Location_nsprefix_) else '' + self.Location.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Location', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SvcStdMsg': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SvcStdMsg') + value_ = self.gds_validate_string(value_, node, 'SvcStdMsg') + self.SvcStdMsg = value_ + self.SvcStdMsg_nsprefix_ = child_.prefix + elif nodeName_ == 'SvcStdDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'SvcStdDays') + ival_ = self.gds_validate_integer(ival_, node, 'SvcStdDays') + self.SvcStdDays = ival_ + self.SvcStdDays_nsprefix_ = child_.prefix + elif nodeName_ == 'Location': + obj_ = LocationType1.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Location = obj_ + obj_.original_tagname_ = 'Location' +# end class ServiceStandardType + + +class LocationType1(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, TotDaysDeliver=None, SchedDlvryDate=None, NonDlvryDays=None, RAUName=None, Street=None, ZIP=None, CloseTimes=None, NonExpeditedExceptions=None, City=None, State=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.TotDaysDeliver = TotDaysDeliver + self.TotDaysDeliver_nsprefix_ = None + if isinstance(SchedDlvryDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(SchedDlvryDate, '%Y-%m-%d').date() + else: + initvalue_ = SchedDlvryDate + self.SchedDlvryDate = initvalue_ + self.SchedDlvryDate_nsprefix_ = None + self.NonDlvryDays = NonDlvryDays + self.NonDlvryDays_nsprefix_ = None + self.RAUName = RAUName + self.RAUName_nsprefix_ = None + self.Street = Street + self.Street_nsprefix_ = None + self.ZIP = ZIP + self.ZIP_nsprefix_ = None + self.CloseTimes = CloseTimes + self.CloseTimes_nsprefix_ = None + self.NonExpeditedExceptions = NonExpeditedExceptions + self.NonExpeditedExceptions_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, LocationType1) + if subclass is not None: + return subclass(*args_, **kwargs_) + if LocationType1.subclass: + return LocationType1.subclass(*args_, **kwargs_) + else: + return 
LocationType1(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TotDaysDeliver(self): + return self.TotDaysDeliver + def set_TotDaysDeliver(self, TotDaysDeliver): + self.TotDaysDeliver = TotDaysDeliver + def get_SchedDlvryDate(self): + return self.SchedDlvryDate + def set_SchedDlvryDate(self, SchedDlvryDate): + self.SchedDlvryDate = SchedDlvryDate + def get_NonDlvryDays(self): + return self.NonDlvryDays + def set_NonDlvryDays(self, NonDlvryDays): + self.NonDlvryDays = NonDlvryDays + def get_RAUName(self): + return self.RAUName + def set_RAUName(self, RAUName): + self.RAUName = RAUName + def get_Street(self): + return self.Street + def set_Street(self, Street): + self.Street = Street + def get_ZIP(self): + return self.ZIP + def set_ZIP(self, ZIP): + self.ZIP = ZIP + def get_CloseTimes(self): + return self.CloseTimes + def set_CloseTimes(self, CloseTimes): + self.CloseTimes = CloseTimes + def get_NonExpeditedExceptions(self): + return self.NonExpeditedExceptions + def set_NonExpeditedExceptions(self, NonExpeditedExceptions): + self.NonExpeditedExceptions = NonExpeditedExceptions + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def has__content(self): + if ( + self.TotDaysDeliver is not None or + self.SchedDlvryDate is not None or + self.NonDlvryDays is not None or + self.RAUName is not None or + self.Street is not None or + self.ZIP is not None or + self.CloseTimes is not None or + self.NonExpeditedExceptions is not None or + self.City is not None or + self.State is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType1', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationType1') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'LocationType1': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationType1') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationType1', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationType1'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationType1', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TotDaysDeliver is not None: + namespaceprefix_ = self.TotDaysDeliver_nsprefix_ + ':' if (UseCapturedNS_ and self.TotDaysDeliver_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTotDaysDeliver>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TotDaysDeliver, 
input_name='TotDaysDeliver'), namespaceprefix_ , eol_)) + if self.SchedDlvryDate is not None: + namespaceprefix_ = self.SchedDlvryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.SchedDlvryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSchedDlvryDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.SchedDlvryDate, input_name='SchedDlvryDate'), namespaceprefix_ , eol_)) + if self.NonDlvryDays is not None: + namespaceprefix_ = self.NonDlvryDays_nsprefix_ + ':' if (UseCapturedNS_ and self.NonDlvryDays_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sNonDlvryDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NonDlvryDays, input_name='NonDlvryDays'), namespaceprefix_ , eol_)) + if self.RAUName is not None: + namespaceprefix_ = self.RAUName_nsprefix_ + ':' if (UseCapturedNS_ and self.RAUName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRAUName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RAUName), input_name='RAUName')), namespaceprefix_ , eol_)) + if self.Street is not None: + namespaceprefix_ = self.Street_nsprefix_ + ':' if (UseCapturedNS_ and self.Street_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStreet>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Street), input_name='Street')), namespaceprefix_ , eol_)) + if self.ZIP is not None: + namespaceprefix_ = self.ZIP_nsprefix_ + ':' if (UseCapturedNS_ and self.ZIP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZIP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.ZIP, input_name='ZIP'), namespaceprefix_ , eol_)) + if self.CloseTimes is not None: + namespaceprefix_ = self.CloseTimes_nsprefix_ + ':' if (UseCapturedNS_ and self.CloseTimes_nsprefix_) else '' + self.CloseTimes.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CloseTimes', pretty_print=pretty_print) + if self.NonExpeditedExceptions is not None: + namespaceprefix_ = self.NonExpeditedExceptions_nsprefix_ + ':' if (UseCapturedNS_ and self.NonExpeditedExceptions_nsprefix_) else '' + self.NonExpeditedExceptions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonExpeditedExceptions', pretty_print=pretty_print) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def 
_buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TotDaysDeliver' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'TotDaysDeliver') + ival_ = self.gds_validate_integer(ival_, node, 'TotDaysDeliver') + self.TotDaysDeliver = ival_ + self.TotDaysDeliver_nsprefix_ = child_.prefix + elif nodeName_ == 'SchedDlvryDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.SchedDlvryDate = dval_ + self.SchedDlvryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'NonDlvryDays' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'NonDlvryDays') + ival_ = self.gds_validate_integer(ival_, node, 'NonDlvryDays') + self.NonDlvryDays = ival_ + self.NonDlvryDays_nsprefix_ = child_.prefix + elif nodeName_ == 'RAUName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RAUName') + value_ = self.gds_validate_string(value_, node, 'RAUName') + self.RAUName = value_ + self.RAUName_nsprefix_ = child_.prefix + elif nodeName_ == 'Street': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Street') + value_ = self.gds_validate_string(value_, node, 'Street') + self.Street = value_ + self.Street_nsprefix_ = child_.prefix + elif nodeName_ == 'ZIP' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ZIP') + ival_ = self.gds_validate_integer(ival_, node, 'ZIP') + self.ZIP = ival_ + self.ZIP_nsprefix_ = child_.prefix + elif nodeName_ == 'CloseTimes': + obj_ = CloseTimesType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.CloseTimes = obj_ + obj_.original_tagname_ = 'CloseTimes' + elif nodeName_ == 'NonExpeditedExceptions': + obj_ = NonExpeditedExceptionsType2.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.NonExpeditedExceptions = obj_ + obj_.original_tagname_ = 'NonExpeditedExceptions' + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix +# end class LocationType1 + + +class CloseTimesType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, M=None, Tu=None, W=None, Th=None, F=None, Sa=None, Su=None, H=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.M = M + self.M_nsprefix_ = None + self.Tu = Tu + self.Tu_nsprefix_ = None + self.W = W + self.W_nsprefix_ = None + self.Th = Th + self.Th_nsprefix_ = None + self.F = F + self.F_nsprefix_ = None + self.Sa = Sa + self.Sa_nsprefix_ = None + self.Su = Su + self.Su_nsprefix_ = None + self.H = H + self.H_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CloseTimesType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CloseTimesType.subclass: + return CloseTimesType.subclass(*args_, **kwargs_) + else: + return CloseTimesType(*args_, 
**kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_M(self): + return self.M + def set_M(self, M): + self.M = M + def get_Tu(self): + return self.Tu + def set_Tu(self, Tu): + self.Tu = Tu + def get_W(self): + return self.W + def set_W(self, W): + self.W = W + def get_Th(self): + return self.Th + def set_Th(self, Th): + self.Th = Th + def get_F(self): + return self.F + def set_F(self, F): + self.F = F + def get_Sa(self): + return self.Sa + def set_Sa(self, Sa): + self.Sa = Sa + def get_Su(self): + return self.Su + def set_Su(self, Su): + self.Su = Su + def get_H(self): + return self.H + def set_H(self, H): + self.H = H + def has__content(self): + if ( + self.M is not None or + self.Tu is not None or + self.W is not None or + self.Th is not None or + self.F is not None or + self.Sa is not None or + self.Su is not None or + self.H is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CloseTimesType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CloseTimesType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'CloseTimesType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CloseTimesType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CloseTimesType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CloseTimesType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CloseTimesType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.M is not None: + namespaceprefix_ = self.M_nsprefix_ + ':' if (UseCapturedNS_ and self.M_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sM>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.M, input_name='M'), namespaceprefix_ , eol_)) + if self.Tu is not None: + namespaceprefix_ = self.Tu_nsprefix_ + ':' if (UseCapturedNS_ and self.Tu_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTu>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Tu, input_name='Tu'), namespaceprefix_ , eol_)) + if self.W is not None: + namespaceprefix_ = self.W_nsprefix_ + ':' if (UseCapturedNS_ and self.W_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sW>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.W, input_name='W'), namespaceprefix_ , eol_)) + if self.Th is not None: + namespaceprefix_ = self.Th_nsprefix_ + ':' if (UseCapturedNS_ and self.Th_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTh>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Th, 
input_name='Th'), namespaceprefix_ , eol_)) + if self.F is not None: + namespaceprefix_ = self.F_nsprefix_ + ':' if (UseCapturedNS_ and self.F_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sF>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.F, input_name='F'), namespaceprefix_ , eol_)) + if self.Sa is not None: + namespaceprefix_ = self.Sa_nsprefix_ + ':' if (UseCapturedNS_ and self.Sa_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSa>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Sa, input_name='Sa'), namespaceprefix_ , eol_)) + if self.Su is not None: + namespaceprefix_ = self.Su_nsprefix_ + ':' if (UseCapturedNS_ and self.Su_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSu>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Su, input_name='Su'), namespaceprefix_ , eol_)) + if self.H is not None: + namespaceprefix_ = self.H_nsprefix_ + ':' if (UseCapturedNS_ and self.H_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sH>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.H, input_name='H'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'M' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'M') + ival_ = self.gds_validate_integer(ival_, node, 'M') + self.M = ival_ + self.M_nsprefix_ = child_.prefix + elif nodeName_ == 'Tu' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Tu') + ival_ = self.gds_validate_integer(ival_, node, 'Tu') + self.Tu = ival_ + self.Tu_nsprefix_ = child_.prefix + elif nodeName_ == 'W' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'W') + ival_ = self.gds_validate_integer(ival_, node, 'W') + self.W = ival_ + self.W_nsprefix_ = child_.prefix + elif nodeName_ == 'Th' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Th') + ival_ = self.gds_validate_integer(ival_, node, 'Th') + self.Th = ival_ + self.Th_nsprefix_ = child_.prefix + elif nodeName_ == 'F' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'F') + ival_ = self.gds_validate_integer(ival_, node, 'F') + self.F = ival_ + self.F_nsprefix_ = child_.prefix + elif nodeName_ == 'Sa' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Sa') + ival_ = self.gds_validate_integer(ival_, node, 'Sa') + self.Sa = ival_ + self.Sa_nsprefix_ = child_.prefix + elif nodeName_ == 'Su' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Su') + ival_ = self.gds_validate_integer(ival_, node, 'Su') + self.Su = ival_ + self.Su_nsprefix_ = child_.prefix + elif nodeName_ == 'H' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'H') + ival_ = self.gds_validate_integer(ival_, node, 'H') + self.H = ival_ + 
self.H_nsprefix_ = child_.prefix +# end class CloseTimesType + + +class NonExpeditedExceptionsType2(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SunHol=None, Closed=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SunHol = SunHol + self.SunHol_nsprefix_ = None + self.Closed = Closed + self.Closed_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, NonExpeditedExceptionsType2) + if subclass is not None: + return subclass(*args_, **kwargs_) + if NonExpeditedExceptionsType2.subclass: + return NonExpeditedExceptionsType2.subclass(*args_, **kwargs_) + else: + return NonExpeditedExceptionsType2(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SunHol(self): + return self.SunHol + def set_SunHol(self, SunHol): + self.SunHol = SunHol + def get_Closed(self): + return self.Closed + def set_Closed(self, Closed): + self.Closed = Closed + def has__content(self): + if ( + self.SunHol is not None or + self.Closed is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType2', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonExpeditedExceptionsType2') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'NonExpeditedExceptionsType2': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonExpeditedExceptionsType2') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NonExpeditedExceptionsType2', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonExpeditedExceptionsType2'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NonExpeditedExceptionsType2', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SunHol is not None: + namespaceprefix_ = self.SunHol_nsprefix_ + ':' if (UseCapturedNS_ and self.SunHol_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSunHol>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.SunHol, input_name='SunHol'), namespaceprefix_ , eol_)) + if self.Closed is not None: + namespaceprefix_ = self.Closed_nsprefix_ + ':' if (UseCapturedNS_ and self.Closed_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClosed>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Closed, 
input_name='Closed'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        pass
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
+        if nodeName_ == 'SunHol' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'SunHol')
+            ival_ = self.gds_validate_integer(ival_, node, 'SunHol')
+            self.SunHol = ival_
+            self.SunHol_nsprefix_ = child_.prefix
+        elif nodeName_ == 'Closed' and child_.text:
+            sval_ = child_.text
+            ival_ = self.gds_parse_integer(sval_, node, 'Closed')
+            ival_ = self.gds_validate_integer(ival_, node, 'Closed')
+            self.Closed = ival_
+            self.Closed_nsprefix_ = child_.prefix
+# end class NonExpeditedExceptionsType2
+
+
+#
+# End data representation classes.
+#
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    prefix_tag = TagNamePrefix + tag
+    rootClass = GDSClassesMapping.get(prefix_tag)
+    if rootClass is None:
+        rootClass = globals().get(prefix_tag)
+    return tag, rootClass
+
+
+def get_required_ns_prefix_defs(rootNode):
+    '''Get all name space prefix definitions required in this XML doc.
+    Return a dictionary of definitions and a char string of definitions.
+    '''
+    nsmap = {
+        prefix: uri
+        for node in rootNode.iter()
+        for (prefix, uri) in node.nsmap.items()
+        if prefix is not None
+    }
+    namespacedefs = ' '.join([
+        'xmlns:{}="{}"'.format(prefix, uri)
+        for prefix, uri in nsmap.items()
+    ])
+    return nsmap, namespacedefs
+
+
+def parse(inFileName, silence=False, print_warnings=True):
+    global CapturedNsmap_
+    gds_collector = GdsCollector_()
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SDCGetLocationsResponse'
+        rootClass = SDCGetLocationsResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode)
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(
+            sys.stdout, 0, name_=rootTag,
+            namespacedef_=namespacedefs,
+            pretty_print=True)
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def parseEtree(inFileName, silence=False, print_warnings=True,
+               mapping=None, reverse_mapping=None, nsmap=None):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    gds_collector = GdsCollector_()
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SDCGetLocationsResponse'
+        rootClass = SDCGetLocationsResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    if mapping is None:
+        mapping = {}
+    if reverse_mapping is None:
+        reverse_mapping = {}
+    rootElement = rootObj.to_etree(
+        None, name_=rootTag, mapping_=mapping,
+        reverse_mapping_=reverse_mapping, nsmap_=nsmap)
+    reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping)
+    # Enable Python to collect the space used by the DOM.
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        content = etree_.tostring(
+            rootElement, pretty_print=True,
+            xml_declaration=True, encoding="utf-8")
+        sys.stdout.write(str(content))
+        sys.stdout.write('\n')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj, rootElement, mapping, reverse_node_mapping
+
+
+def parseString(inString, silence=False, print_warnings=True):
+    '''Parse a string, create the object tree, and export it.
+
+    Arguments:
+    - inString -- A string. This XML fragment should not start
+      with an XML declaration containing an encoding.
+    - silence -- A boolean. If False, export the object.
+    Returns -- The root object in the tree.
+    '''
+    parser = None
+    rootNode= parsexmlstring_(inString, parser)
+    gds_collector = GdsCollector_()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SDCGetLocationsResponse'
+        rootClass = SDCGetLocationsResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    if not SaveElementTreeNode:
+        rootNode = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(
+            sys.stdout, 0, name_=rootTag,
+            namespacedef_='')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def parseLiteral(inFileName, silence=False, print_warnings=True):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    gds_collector = GdsCollector_()
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'SDCGetLocationsResponse'
+        rootClass = SDCGetLocationsResponse
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode, gds_collector_=gds_collector)
+    # Enable Python to collect the space used by the DOM.
+    if not SaveElementTreeNode:
+        doc = None
+        rootNode = None
+    if not silence:
+        sys.stdout.write('#from sdc_get_locations_response import *\n\n')
+        sys.stdout.write('import sdc_get_locations_response as model_\n\n')
+        sys.stdout.write('rootObj = model_.rootClass(\n')
+        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
+        sys.stdout.write(')\n')
+    if print_warnings and len(gds_collector.get_messages()) > 0:
+        separator = ('-' * 50) + '\n'
+        sys.stderr.write(separator)
+        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
+            len(gds_collector.get_messages()), ))
+        gds_collector.write_messages(sys.stderr)
+        sys.stderr.write(separator)
+    return rootObj
+
+
+def main():
+    args = sys.argv[1:]
+    if len(args) == 1:
+        parse(args[0])
+    else:
+        usage()
+
+
+if __name__ == '__main__':
+    #import pdb; pdb.set_trace()
+    main()
+
+RenameMappings_ = {
+}
+
+#
+# Mapping of namespaces to types defined in them
+# and the file in which each is defined.
+# simpleTypes are marked "ST" and complexTypes "CT".
+NamespaceToDefMappings_ = {}
+
+__all__ = [
+    "CloseTimesType",
+    "CommitmentType",
+    "ExpeditedType",
+    "HFPUType",
+    "LocationType",
+    "LocationType1",
+    "NonExpeditedExceptionsType",
+    "NonExpeditedExceptionsType2",
+    "NonExpeditedType",
+    "SDCGetLocationsResponse",
+    "ServiceStandardType"
+]
diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_request.py
new file mode 100644
index 0000000000..6675a579fa
--- /dev/null
+++ b/modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_request.py
@@ -0,0 +1,1386 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+#
+# Generated Wed Apr 3 21:09:08 2024 by generateDS.py version 2.43.3.
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/standard_b_request.py') +# +# Command line arguments: +# ./schemas/StandardBRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/standard_b_request.py" ./schemas/StandardBRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list)
+    def get_class_obj_(self, node, default_class=None):
+        class_obj1 = default_class
+        if 'xsi' in node.nsmap:
+            classname = node.get('{%s}type' % node.nsmap['xsi'])
+            if classname is not None:
+                names = classname.split(':')
+                if len(names) == 2:
+                    classname = names[1]
+                class_obj2 = globals().get(classname)
+                if class_obj2 is not None:
+                    class_obj1 = class_obj2
+        return class_obj1
+    def gds_build_any(self, node, type_name=None):
+        # provide default value in case option --disable-xml is used.
+        content = ""
+        content = etree_.tostring(node, encoding="unicode")
+        return content
+    @classmethod
+    def gds_reverse_node_mapping(cls, mapping):
+        return dict(((v, k) for k, v in mapping.items()))
+    @staticmethod
+    def gds_encode(instring):
+        if sys.version_info.major == 2:
+            if ExternalEncoding:
+                encoding = ExternalEncoding
+            else:
+                encoding = 'utf-8'
+            return instring.encode(encoding)
+        else:
+            return instring
+    @staticmethod
+    def convert_unicode(instring):
+        if isinstance(instring, str):
+            result = quote_xml(instring)
+        elif sys.version_info.major == 2 and isinstance(instring, unicode):
+            result = quote_xml(instring).encode('utf8')
+        else:
+            result = GeneratedsSuper.gds_encode(str(instring))
+        return result
+    def __eq__(self, other):
+        def excl_select_objs_(obj):
+            return (obj[0] != 'parent_object_' and
+                    obj[0] != 'gds_collector_')
+        if type(self) != type(other):
+            return False
+        return all(x == y for x, y in zip_longest(
+            filter(excl_select_objs_, self.__dict__.items()),
+            filter(excl_select_objs_, other.__dict__.items())))
+    def __ne__(self, other):
+        return not self.__eq__(other)
+    # Django ETL transform hooks.
+    def gds_djo_etl_transform(self):
+        pass
+    def gds_djo_etl_transform_db_obj(self, dbobj):
+        pass
+    # SQLAlchemy ETL transform hooks.
+    def gds_sqa_etl_transform(self):
+        return 0, None
+    def gds_sqa_etl_transform_db_obj(self, dbobj):
+        pass
+    def gds_get_node_lineno_(self):
+        if (hasattr(self, "gds_elementtree_node_") and
+                self.gds_elementtree_node_ is not None):
+            return ' near line {}'.format(
+                self.gds_elementtree_node_.sourceline)
+        else:
+            return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
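+# A minimal usage sketch (illustrative only, not part of the generateDS.py
+# output): one way this generated module is typically consumed.  The XML
+# payload and field values below are made up for illustration and assume
+# the StandardBRequest element type defined further down in this file.
+#
+## import standard_b_request as model_
+## xml = (
+##     '<StandardBRequest USERID="XXX" PASSWORD="YYY">'
+##     '<OriginZip>20770</OriginZip>'
+##     '<DestinationZip>94607</DestinationZip>'
+##     '</StandardBRequest>'
+## )
+## request = model_.parseString(xml, silence=True)
+## print(request.get_OriginZip(), request.get_DestinationZip())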
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class StandardBRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, OriginZip=None, DestinationZip=None, DestinationType=None, ClientType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.DestinationType = DestinationType + self.DestinationType_nsprefix_ = None + self.ClientType = ClientType + self.ClientType_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, StandardBRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if StandardBRequest.subclass: + return StandardBRequest.subclass(*args_, **kwargs_) + else: + return StandardBRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_DestinationType(self): + return self.DestinationType + def set_DestinationType(self, DestinationType): + self.DestinationType = DestinationType + def get_ClientType(self): + return self.ClientType + def set_ClientType(self, ClientType): + self.ClientType = ClientType + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.DestinationType is not None or + self.ClientType is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StandardBRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('StandardBRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if 
pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None and name_ == 'StandardBRequest':
+            name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StandardBRequest')
+        if self.has__content():
+            outfile.write('>%s' % (eol_, ))
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StandardBRequest', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StandardBRequest'):
+        if self.USERID is not None and 'USERID' not in already_processed:
+            already_processed.add('USERID')
+            outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), ))
+        if self.PASSWORD is not None and 'PASSWORD' not in already_processed:
+            already_processed.add('PASSWORD')
+            outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StandardBRequest', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.OriginZip is not None:
+            namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sOriginZip>%s</%sOriginZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_))
+        if self.DestinationZip is not None:
+            namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDestinationZip>%s</%sDestinationZip>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_))
+        if self.DestinationType is not None:
+            namespaceprefix_ = self.DestinationType_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDestinationType>%s</%sDestinationType>%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationType, input_name='DestinationType'), namespaceprefix_ , eol_))
+        if self.ClientType is not None:
+            namespaceprefix_ = self.ClientType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientType_nsprefix_) else ''
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sClientType>%s</%sClientType>%s' % (namespaceprefix_ , self.gds_format_integer(self.ClientType, input_name='ClientType'), namespaceprefix_ , eol_))
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
+        already_processed = set()
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
+        return self
+    def _buildAttributes(self, node, attrs, already_processed):
+        value =
find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationType') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationType') + self.DestinationType = ival_ + self.DestinationType_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientType' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'ClientType') + ival_ = self.gds_validate_integer(ival_, node, 'ClientType') + self.ClientType = ival_ + self.ClientType_nsprefix_ = child_.prefix +# end class StandardBRequest + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBRequest' + rootClass = StandardBRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from standard_b_request import *\n\n') + sys.stdout.write('import standard_b_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "StandardBRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_response.py new file mode 100644 index 0000000000..9369aa8fe3 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/standard_b_response.py @@ -0,0 +1,1396 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:09 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/standard_b_response.py') +# +# Command line arguments: +# ./schemas/StandardBResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/standard_b_response.py" ./schemas/StandardBResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
+            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions
+#
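For orientation only, the short sketch below (an editorial example, not part of the generated module) shows how the gds_* date/time helpers defined on GeneratedsSuper above are typically exercised; the timestamp value is a placeholder.

# Illustrative usage sketch -- editorial example, not generated code.
import datetime

obj = StandardBResponse()  # any generated class inherits the gds_* helpers
stamp = datetime.datetime(2024, 4, 3, 21, 9, 9, tzinfo=datetime.timezone.utc)

# Format for an xsd:dateTime element; a UTC offset of zero collapses to 'Z'.
text = obj.gds_format_datetime(stamp)          # '2024-04-03T21:09:09Z'

# Parse it back; the classmethod returns a timezone-aware datetime.
parsed = GeneratedsSuper.gds_parse_datetime(text)
assert parsed.utcoffset() == datetime.timedelta(0)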
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class StandardBResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, OriginZip=None, DestinationZip=None, Days=None, Message=None, EffectiveAcceptanceDate=None, ScheduledDeliveryDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.Days = Days + self.Days_nsprefix_ = None + self.Message = Message + self.Message_nsprefix_ = None + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + self.EffectiveAcceptanceDate_nsprefix_ = None + self.ScheduledDeliveryDate = ScheduledDeliveryDate + self.ScheduledDeliveryDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, StandardBResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if StandardBResponse.subclass: + return StandardBResponse.subclass(*args_, **kwargs_) + else: + return StandardBResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_Days(self): + return self.Days + def set_Days(self, Days): + self.Days = Days + def get_Message(self): + return self.Message + def set_Message(self, Message): + self.Message = Message + def get_EffectiveAcceptanceDate(self): + return self.EffectiveAcceptanceDate + def set_EffectiveAcceptanceDate(self, EffectiveAcceptanceDate): + self.EffectiveAcceptanceDate = EffectiveAcceptanceDate + def get_ScheduledDeliveryDate(self): + return self.ScheduledDeliveryDate + def set_ScheduledDeliveryDate(self, ScheduledDeliveryDate): + self.ScheduledDeliveryDate = ScheduledDeliveryDate + def has__content(self): + if ( + self.OriginZip is not None or + self.DestinationZip is not None or + self.Days is not None or + self.Message is not None or + self.EffectiveAcceptanceDate is not None or + self.ScheduledDeliveryDate is not None + ): + return True + else: + return False + def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='StandardBResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('StandardBResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'StandardBResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StandardBResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StandardBResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StandardBResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StandardBResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.OriginZip, input_name='OriginZip'), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.Days is not None: + namespaceprefix_ = self.Days_nsprefix_ + ':' if (UseCapturedNS_ and self.Days_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDays>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Days, input_name='Days'), namespaceprefix_ , eol_)) + if self.Message is not None: + namespaceprefix_ = self.Message_nsprefix_ + ':' if (UseCapturedNS_ and self.Message_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMessage>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Message), input_name='Message')), namespaceprefix_ , eol_)) + if self.EffectiveAcceptanceDate is not None: + namespaceprefix_ = self.EffectiveAcceptanceDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveAcceptanceDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEffectiveAcceptanceDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EffectiveAcceptanceDate), input_name='EffectiveAcceptanceDate')), namespaceprefix_ , eol_)) + if self.ScheduledDeliveryDate is not None: + namespaceprefix_ = self.ScheduledDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ScheduledDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sScheduledDeliveryDate>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.ScheduledDeliveryDate), input_name='ScheduledDeliveryDate')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'OriginZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'OriginZip') + ival_ = self.gds_validate_integer(ival_, node, 'OriginZip') + self.OriginZip = ival_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'Days' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Days') + ival_ = self.gds_validate_integer(ival_, node, 'Days') + self.Days = ival_ + self.Days_nsprefix_ = child_.prefix + elif nodeName_ == 'Message': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Message') + value_ = self.gds_validate_string(value_, node, 'Message') + self.Message = value_ + self.Message_nsprefix_ = child_.prefix + elif nodeName_ == 'EffectiveAcceptanceDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EffectiveAcceptanceDate') + value_ = self.gds_validate_string(value_, node, 'EffectiveAcceptanceDate') + self.EffectiveAcceptanceDate = value_ + self.EffectiveAcceptanceDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ScheduledDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ScheduledDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ScheduledDeliveryDate') + self.ScheduledDeliveryDate = value_ + self.ScheduledDeliveryDate_nsprefix_ = child_.prefix +# end class StandardBResponse + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'StandardBResponse' + rootClass = StandardBResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from standard_b_response import *\n\n') + sys.stdout.write('import standard_b_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "StandardBResponse" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/track_field_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/track_field_request.py new file mode 100644 index 0000000000..7f67ecd4ac --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/track_field_request.py @@ -0,0 +1,1520 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:09 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/track_field_request.py') +# +# Command line arguments: +# ./schemas/TrackFieldRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/track_field_request.py" ./schemas/TrackFieldRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
+            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions
+#
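For orientation only, the short sketch below (an editorial example, not part of the generated module) shows how the generated request classes defined later in this file are typically assembled and serialized; the credentials and tracking number are placeholders.

# Illustrative usage sketch -- editorial example, not generated code.
from io import StringIO

request = TrackFieldRequest(
    USERID="XXXXXXXX",        # placeholder credential
    PASSWORD="XXXXXXXX",      # placeholder credential
    Revision="1",
    ClientIp="127.0.0.1",
    SourceId="sample",
)
request.add_TrackID(TrackIDType(ID="9400100000000000000000"))  # placeholder tracking ID

buffer = StringIO()
request.export(buffer, 0, name_="TrackFieldRequest")
xml_payload = buffer.getvalue()  # the XML body for the tracking request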
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class TrackFieldRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Revision=None, ClientIp=None, SourceId=None, TrackID=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Revision = Revision + self.Revision_nsprefix_ = None + self.ClientIp = ClientIp + self.ClientIp_nsprefix_ = None + self.SourceId = SourceId + self.SourceId_nsprefix_ = None + if TrackID is None: + self.TrackID = [] + else: + self.TrackID = TrackID + self.TrackID_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackFieldRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackFieldRequest.subclass: + return TrackFieldRequest.subclass(*args_, **kwargs_) + else: + return TrackFieldRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Revision(self): + return self.Revision + def set_Revision(self, Revision): + self.Revision = Revision + def get_ClientIp(self): + return self.ClientIp + def set_ClientIp(self, ClientIp): + self.ClientIp = ClientIp + def get_SourceId(self): + return self.SourceId + def set_SourceId(self, SourceId): + self.SourceId = SourceId + def get_TrackID(self): + return self.TrackID + def set_TrackID(self, TrackID): + self.TrackID = TrackID + def add_TrackID(self, value): + self.TrackID.append(value) + def insert_TrackID_at(self, index, value): + self.TrackID.insert(index, value) + def replace_TrackID_at(self, index, value): + self.TrackID[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Revision is not None or + self.ClientIp is not None or + self.SourceId is not None or + self.TrackID + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackFieldRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackFieldRequest') 
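+        # Note: if an importable generatedsnamespaces module supplies an entry
+        # for 'TrackFieldRequest' in GenerateDSNamespaceDefs (see the comment
+        # near the top of this file), it overrides the namespacedef_ argument
+        # passed by the caller.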
+ if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackFieldRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackFieldRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackFieldRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackFieldRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackFieldRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Revision is not None: + namespaceprefix_ = self.Revision_nsprefix_ + ':' if (UseCapturedNS_ and self.Revision_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRevision>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Revision), input_name='Revision')), namespaceprefix_ , eol_)) + if self.ClientIp is not None: + namespaceprefix_ = self.ClientIp_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientIp_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClientIp>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClientIp), input_name='ClientIp')), namespaceprefix_ , eol_)) + if self.SourceId is not None: + namespaceprefix_ = self.SourceId_nsprefix_ + ':' if (UseCapturedNS_ and self.SourceId_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sSourceId>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SourceId), input_name='SourceId')), namespaceprefix_ , eol_)) + for TrackID_ in self.TrackID: + namespaceprefix_ = self.TrackID_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackID_nsprefix_) else '' + TrackID_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackID', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = 
find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Revision': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Revision') + value_ = self.gds_validate_string(value_, node, 'Revision') + self.Revision = value_ + self.Revision_nsprefix_ = child_.prefix + elif nodeName_ == 'ClientIp': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClientIp') + value_ = self.gds_validate_string(value_, node, 'ClientIp') + self.ClientIp = value_ + self.ClientIp_nsprefix_ = child_.prefix + elif nodeName_ == 'SourceId': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'SourceId') + value_ = self.gds_validate_string(value_, node, 'SourceId') + self.SourceId = value_ + self.SourceId_nsprefix_ = child_.prefix + elif nodeName_ == 'TrackID': + obj_ = TrackIDType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackID.append(obj_) + obj_.original_tagname_ = 'TrackID' +# end class TrackFieldRequest + + +class TrackIDType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, DestinationZipCode=None, MailingDate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.DestinationZipCode = DestinationZipCode + self.DestinationZipCode_nsprefix_ = None + if isinstance(MailingDate, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(MailingDate, '%Y-%m-%d').date() + else: + initvalue_ = MailingDate + self.MailingDate = initvalue_ + self.MailingDate_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackIDType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackIDType.subclass: + return TrackIDType.subclass(*args_, **kwargs_) + else: + return TrackIDType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_DestinationZipCode(self): + return self.DestinationZipCode + def set_DestinationZipCode(self, DestinationZipCode): + self.DestinationZipCode = DestinationZipCode + def get_MailingDate(self): + return self.MailingDate + def set_MailingDate(self, MailingDate): + self.MailingDate = MailingDate + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.DestinationZipCode is not None or + self.MailingDate is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackIDType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackIDType': + name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackIDType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackIDType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackIDType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.DestinationZipCode is not None: + namespaceprefix_ = self.DestinationZipCode_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZipCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZipCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZipCode, input_name='DestinationZipCode'), namespaceprefix_ , eol_)) + if self.MailingDate is not None: + namespaceprefix_ = self.MailingDate_nsprefix_ + ':' if (UseCapturedNS_ and self.MailingDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailingDate>%s%s' % (namespaceprefix_ , self.gds_format_date(self.MailingDate, input_name='MailingDate'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'DestinationZipCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZipCode') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZipCode') + self.DestinationZipCode = ival_ + self.DestinationZipCode_nsprefix_ = child_.prefix + elif nodeName_ == 'MailingDate': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.MailingDate = dval_ + self.MailingDate_nsprefix_ = child_.prefix +# end class TrackIDType + + +# +# End data representation classes. 
+# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. 
If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackFieldRequest' + rootClass = TrackFieldRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from track_field_request import *\n\n') + sys.stdout.write('import track_field_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "TrackFieldRequest", + "TrackIDType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/track_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/track_request.py new file mode 100644 index 0000000000..03329873ef --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/track_request.py @@ -0,0 +1,1432 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:09 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/track_request.py') +# +# Command line arguments: +# ./schemas/TrackRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/track_request.py" ./schemas/TrackRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
+            self.get_path_list_(node.getparent(), path_list)
+        def get_class_obj_(self, node, default_class=None):
+            class_obj1 = default_class
+            if 'xsi' in node.nsmap:
+                classname = node.get('{%s}type' % node.nsmap['xsi'])
+                if classname is not None:
+                    names = classname.split(':')
+                    if len(names) == 2:
+                        classname = names[1]
+                    class_obj2 = globals().get(classname)
+                    if class_obj2 is not None:
+                        class_obj1 = class_obj2
+            return class_obj1
+        def gds_build_any(self, node, type_name=None):
+            # provide default value in case option --disable-xml is used.
+            content = ""
+            content = etree_.tostring(node, encoding="unicode")
+            return content
+        @classmethod
+        def gds_reverse_node_mapping(cls, mapping):
+            return dict(((v, k) for k, v in mapping.items()))
+        @staticmethod
+        def gds_encode(instring):
+            if sys.version_info.major == 2:
+                if ExternalEncoding:
+                    encoding = ExternalEncoding
+                else:
+                    encoding = 'utf-8'
+                return instring.encode(encoding)
+            else:
+                return instring
+        @staticmethod
+        def convert_unicode(instring):
+            if isinstance(instring, str):
+                result = quote_xml(instring)
+            elif sys.version_info.major == 2 and isinstance(instring, unicode):
+                result = quote_xml(instring).encode('utf8')
+            else:
+                result = GeneratedsSuper.gds_encode(str(instring))
+            return result
+        def __eq__(self, other):
+            def excl_select_objs_(obj):
+                return (obj[0] != 'parent_object_' and
+                        obj[0] != 'gds_collector_')
+            if type(self) != type(other):
+                return False
+            return all(x == y for x, y in zip_longest(
+                filter(excl_select_objs_, self.__dict__.items()),
+                filter(excl_select_objs_, other.__dict__.items())))
+        def __ne__(self, other):
+            return not self.__eq__(other)
+        # Django ETL transform hooks.
+        def gds_djo_etl_transform(self):
+            pass
+        def gds_djo_etl_transform_db_obj(self, dbobj):
+            pass
+        # SQLAlchemy ETL transform hooks.
+        def gds_sqa_etl_transform(self):
+            return 0, None
+        def gds_sqa_etl_transform_db_obj(self, dbobj):
+            pass
+        def gds_get_node_lineno_(self):
+            if (hasattr(self, "gds_elementtree_node_") and
+                    self.gds_elementtree_node_ is not None):
+                return ' near line {}'.format(
+                    self.gds_elementtree_node_.sourceline)
+            else:
+                return ""
+
+
+    def getSubclassFromModule_(module, class_):
+        '''Get the subclass of a class from a specific module.'''
+        name = class_.__name__ + 'Sub'
+        if hasattr(module, name):
+            return getattr(module, name)
+        else:
+            return None
+
+
+#
+# If you have installed IPython you can uncomment and use the following.
+# IPython is available from http://ipython.scipy.org/.
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+# Set this to false in order to deactivate during export, the use of
+# name space prefixes captured from the input document.
+UseCapturedNS_ = True
+CapturedNsmap_ = {}
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class TrackRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, TrackID=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + if TrackID is None: + self.TrackID = [] + else: + self.TrackID = TrackID + self.TrackID_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackRequest.subclass: + return TrackRequest.subclass(*args_, **kwargs_) + else: + return TrackRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackID(self): + return self.TrackID + def set_TrackID(self, TrackID): + self.TrackID = TrackID + def add_TrackID(self, value): + self.TrackID.append(value) + def insert_TrackID_at(self, index, value): + self.TrackID.insert(index, value) + def replace_TrackID_at(self, index, value): + self.TrackID[index] = value + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.TrackID + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='TrackRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for TrackID_ in self.TrackID: + namespaceprefix_ = self.TrackID_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackID_nsprefix_) else '' + TrackID_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackID', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackID': + obj_ = TrackIDType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackID.append(obj_) + obj_.original_tagname_ = 'TrackID' +# end class TrackRequest + + +class TrackIDType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, valueOf_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackIDType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackIDType.subclass: + return TrackIDType.subclass(*args_, **kwargs_) + else: + return TrackIDType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def get_valueOf_(self): return self.valueOf_ + def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ + def 
has__content(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackIDType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackIDType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackIDType') + outfile.write('>') + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_, pretty_print=pretty_print) + outfile.write(self.convert_unicode(self.valueOf_)) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackIDType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackIDType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + pass +# end class TrackIDType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackRequest' + rootClass = TrackRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from track_request import *\n\n') + sys.stdout.write('import track_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "TrackIDType", + "TrackRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/track_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/track_response.py new file mode 100644 index 0000000000..f1d7f95e75 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/track_response.py @@ -0,0 +1,3349 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:09 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/track_response.py') +# +# Command line arguments: +# ./schemas/TrackResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/track_response.py" ./schemas/TrackResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. 
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    s1 = s1.replace('\n', '&#10;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        if prefix == 'xml':
+            namespace = 'http://www.w3.org/XML/1998/namespace'
+        else:
+            namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+def encode_str_2_3(instr):
+    return instr
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    if node is not None:
+        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:    # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:    # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+            optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class TrackResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, TrackInfo=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if TrackInfo is None: + self.TrackInfo = [] + else: + self.TrackInfo = TrackInfo + self.TrackInfo_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackResponse.subclass: + return TrackResponse.subclass(*args_, **kwargs_) + else: + return TrackResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_TrackInfo(self): + return self.TrackInfo + def set_TrackInfo(self, TrackInfo): + self.TrackInfo = TrackInfo + def add_TrackInfo(self, value): + self.TrackInfo.append(value) + def insert_TrackInfo_at(self, index, value): + self.TrackInfo.insert(index, value) + def replace_TrackInfo_at(self, index, value): + self.TrackInfo[index] = value + def has__content(self): + if ( + self.TrackInfo + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackResponse'): + 
pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for TrackInfo_ in self.TrackInfo: + namespaceprefix_ = self.TrackInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackInfo_nsprefix_) else '' + TrackInfo_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackInfo', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'TrackInfo': + obj_ = TrackInfoType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackInfo.append(obj_) + obj_.original_tagname_ = 'TrackInfo' +# end class TrackResponse + + +class TrackInfoType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, AdditionalInfo=None, ADPScripting=None, ARCHDATA=None, ArchiveRestoreInfo=None, AssociatedLabel=None, Class=None, ClassOfMailCode=None, DeliveryNotificationDate=None, DestinationCity=None, DestinationCountryCode=None, DestinationState=None, DestinationZip=None, EditedLabelID=None, EmailEnabled=None, ExpectedDeliveryDate=None, ExpectedDeliveryTime=None, GuaranteedDeliveryDate=None, GuaranteedDeliveryTime=None, GuaranteedDetails=None, KahalaIndicator=None, MailTypeCode=None, MPDATE=None, MPSUFFIX=None, OriginCity=None, OriginCountryCode=None, OriginState=None, OriginZip=None, PodEnabled=None, PredictedDeliveryDate=None, PredictedDeliveryTime=None, PDWStart=None, PDWEnd=None, RelatedRRID=None, RestoreEnabled=None, RRAMenabled=None, RreEnabled=None, Service=None, ServiceTypeCode=None, Status=None, StatusCategory=None, StatusSummary=None, TABLECODE=None, TpodEnabled=None, ValueofArticle=None, EnabledNotificationRequests=None, TrackSummary=None, TrackDetail=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(None, ID) + self.ID_nsprefix_ = None + self.AdditionalInfo = AdditionalInfo + self.AdditionalInfo_nsprefix_ = None + self.ADPScripting = ADPScripting + self.ADPScripting_nsprefix_ = None + self.ARCHDATA = ARCHDATA + self.ARCHDATA_nsprefix_ = None + self.ArchiveRestoreInfo = ArchiveRestoreInfo + self.ArchiveRestoreInfo_nsprefix_ = None + self.AssociatedLabel = AssociatedLabel + self.AssociatedLabel_nsprefix_ = None + self.Class = Class + self.Class_nsprefix_ = None + self.ClassOfMailCode = ClassOfMailCode + self.ClassOfMailCode_nsprefix_ = None + self.DeliveryNotificationDate = DeliveryNotificationDate + self.DeliveryNotificationDate_nsprefix_ = None + self.DestinationCity = DestinationCity + self.DestinationCity_nsprefix_ = None + self.DestinationCountryCode = DestinationCountryCode + self.DestinationCountryCode_nsprefix_ = None + self.DestinationState = DestinationState + 
self.DestinationState_nsprefix_ = None + self.DestinationZip = DestinationZip + self.DestinationZip_nsprefix_ = None + self.EditedLabelID = EditedLabelID + self.EditedLabelID_nsprefix_ = None + self.EmailEnabled = EmailEnabled + self.EmailEnabled_nsprefix_ = None + self.ExpectedDeliveryDate = ExpectedDeliveryDate + self.ExpectedDeliveryDate_nsprefix_ = None + self.ExpectedDeliveryTime = ExpectedDeliveryTime + self.ExpectedDeliveryTime_nsprefix_ = None + self.GuaranteedDeliveryDate = GuaranteedDeliveryDate + self.GuaranteedDeliveryDate_nsprefix_ = None + self.GuaranteedDeliveryTime = GuaranteedDeliveryTime + self.GuaranteedDeliveryTime_nsprefix_ = None + self.GuaranteedDetails = GuaranteedDetails + self.GuaranteedDetails_nsprefix_ = None + self.KahalaIndicator = KahalaIndicator + self.KahalaIndicator_nsprefix_ = None + self.MailTypeCode = MailTypeCode + self.MailTypeCode_nsprefix_ = None + self.MPDATE = MPDATE + self.MPDATE_nsprefix_ = None + self.MPSUFFIX = MPSUFFIX + self.MPSUFFIX_nsprefix_ = None + self.OriginCity = OriginCity + self.OriginCity_nsprefix_ = None + self.OriginCountryCode = OriginCountryCode + self.OriginCountryCode_nsprefix_ = None + self.OriginState = OriginState + self.OriginState_nsprefix_ = None + self.OriginZip = OriginZip + self.OriginZip_nsprefix_ = None + self.PodEnabled = PodEnabled + self.PodEnabled_nsprefix_ = None + self.PredictedDeliveryDate = PredictedDeliveryDate + self.PredictedDeliveryDate_nsprefix_ = None + self.PredictedDeliveryTime = PredictedDeliveryTime + self.PredictedDeliveryTime_nsprefix_ = None + self.PDWStart = PDWStart + self.PDWStart_nsprefix_ = None + self.PDWEnd = PDWEnd + self.PDWEnd_nsprefix_ = None + self.RelatedRRID = RelatedRRID + self.RelatedRRID_nsprefix_ = None + self.RestoreEnabled = RestoreEnabled + self.RestoreEnabled_nsprefix_ = None + self.RRAMenabled = RRAMenabled + self.RRAMenabled_nsprefix_ = None + self.RreEnabled = RreEnabled + self.RreEnabled_nsprefix_ = None + self.Service = Service + self.Service_nsprefix_ = None + self.ServiceTypeCode = ServiceTypeCode + self.ServiceTypeCode_nsprefix_ = None + self.Status = Status + self.Status_nsprefix_ = None + self.StatusCategory = StatusCategory + self.StatusCategory_nsprefix_ = None + self.StatusSummary = StatusSummary + self.StatusSummary_nsprefix_ = None + self.TABLECODE = TABLECODE + self.TABLECODE_nsprefix_ = None + self.TpodEnabled = TpodEnabled + self.TpodEnabled_nsprefix_ = None + self.ValueofArticle = ValueofArticle + self.ValueofArticle_nsprefix_ = None + self.EnabledNotificationRequests = EnabledNotificationRequests + self.EnabledNotificationRequests_nsprefix_ = None + self.TrackSummary = TrackSummary + self.TrackSummary_nsprefix_ = None + if TrackDetail is None: + self.TrackDetail = [] + else: + self.TrackDetail = TrackDetail + self.TrackDetail_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackInfoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackInfoType.subclass: + return TrackInfoType.subclass(*args_, **kwargs_) + else: + return TrackInfoType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_AdditionalInfo(self): + return self.AdditionalInfo + def set_AdditionalInfo(self, AdditionalInfo): + self.AdditionalInfo = AdditionalInfo + def get_ADPScripting(self): + return self.ADPScripting + def 
set_ADPScripting(self, ADPScripting): + self.ADPScripting = ADPScripting + def get_ARCHDATA(self): + return self.ARCHDATA + def set_ARCHDATA(self, ARCHDATA): + self.ARCHDATA = ARCHDATA + def get_ArchiveRestoreInfo(self): + return self.ArchiveRestoreInfo + def set_ArchiveRestoreInfo(self, ArchiveRestoreInfo): + self.ArchiveRestoreInfo = ArchiveRestoreInfo + def get_AssociatedLabel(self): + return self.AssociatedLabel + def set_AssociatedLabel(self, AssociatedLabel): + self.AssociatedLabel = AssociatedLabel + def get_Class(self): + return self.Class + def set_Class(self, Class): + self.Class = Class + def get_ClassOfMailCode(self): + return self.ClassOfMailCode + def set_ClassOfMailCode(self, ClassOfMailCode): + self.ClassOfMailCode = ClassOfMailCode + def get_DeliveryNotificationDate(self): + return self.DeliveryNotificationDate + def set_DeliveryNotificationDate(self, DeliveryNotificationDate): + self.DeliveryNotificationDate = DeliveryNotificationDate + def get_DestinationCity(self): + return self.DestinationCity + def set_DestinationCity(self, DestinationCity): + self.DestinationCity = DestinationCity + def get_DestinationCountryCode(self): + return self.DestinationCountryCode + def set_DestinationCountryCode(self, DestinationCountryCode): + self.DestinationCountryCode = DestinationCountryCode + def get_DestinationState(self): + return self.DestinationState + def set_DestinationState(self, DestinationState): + self.DestinationState = DestinationState + def get_DestinationZip(self): + return self.DestinationZip + def set_DestinationZip(self, DestinationZip): + self.DestinationZip = DestinationZip + def get_EditedLabelID(self): + return self.EditedLabelID + def set_EditedLabelID(self, EditedLabelID): + self.EditedLabelID = EditedLabelID + def get_EmailEnabled(self): + return self.EmailEnabled + def set_EmailEnabled(self, EmailEnabled): + self.EmailEnabled = EmailEnabled + def get_ExpectedDeliveryDate(self): + return self.ExpectedDeliveryDate + def set_ExpectedDeliveryDate(self, ExpectedDeliveryDate): + self.ExpectedDeliveryDate = ExpectedDeliveryDate + def get_ExpectedDeliveryTime(self): + return self.ExpectedDeliveryTime + def set_ExpectedDeliveryTime(self, ExpectedDeliveryTime): + self.ExpectedDeliveryTime = ExpectedDeliveryTime + def get_GuaranteedDeliveryDate(self): + return self.GuaranteedDeliveryDate + def set_GuaranteedDeliveryDate(self, GuaranteedDeliveryDate): + self.GuaranteedDeliveryDate = GuaranteedDeliveryDate + def get_GuaranteedDeliveryTime(self): + return self.GuaranteedDeliveryTime + def set_GuaranteedDeliveryTime(self, GuaranteedDeliveryTime): + self.GuaranteedDeliveryTime = GuaranteedDeliveryTime + def get_GuaranteedDetails(self): + return self.GuaranteedDetails + def set_GuaranteedDetails(self, GuaranteedDetails): + self.GuaranteedDetails = GuaranteedDetails + def get_KahalaIndicator(self): + return self.KahalaIndicator + def set_KahalaIndicator(self, KahalaIndicator): + self.KahalaIndicator = KahalaIndicator + def get_MailTypeCode(self): + return self.MailTypeCode + def set_MailTypeCode(self, MailTypeCode): + self.MailTypeCode = MailTypeCode + def get_MPDATE(self): + return self.MPDATE + def set_MPDATE(self, MPDATE): + self.MPDATE = MPDATE + def get_MPSUFFIX(self): + return self.MPSUFFIX + def set_MPSUFFIX(self, MPSUFFIX): + self.MPSUFFIX = MPSUFFIX + def get_OriginCity(self): + return self.OriginCity + def set_OriginCity(self, OriginCity): + self.OriginCity = OriginCity + def get_OriginCountryCode(self): + return self.OriginCountryCode + def 
set_OriginCountryCode(self, OriginCountryCode): + self.OriginCountryCode = OriginCountryCode + def get_OriginState(self): + return self.OriginState + def set_OriginState(self, OriginState): + self.OriginState = OriginState + def get_OriginZip(self): + return self.OriginZip + def set_OriginZip(self, OriginZip): + self.OriginZip = OriginZip + def get_PodEnabled(self): + return self.PodEnabled + def set_PodEnabled(self, PodEnabled): + self.PodEnabled = PodEnabled + def get_PredictedDeliveryDate(self): + return self.PredictedDeliveryDate + def set_PredictedDeliveryDate(self, PredictedDeliveryDate): + self.PredictedDeliveryDate = PredictedDeliveryDate + def get_PredictedDeliveryTime(self): + return self.PredictedDeliveryTime + def set_PredictedDeliveryTime(self, PredictedDeliveryTime): + self.PredictedDeliveryTime = PredictedDeliveryTime + def get_PDWStart(self): + return self.PDWStart + def set_PDWStart(self, PDWStart): + self.PDWStart = PDWStart + def get_PDWEnd(self): + return self.PDWEnd + def set_PDWEnd(self, PDWEnd): + self.PDWEnd = PDWEnd + def get_RelatedRRID(self): + return self.RelatedRRID + def set_RelatedRRID(self, RelatedRRID): + self.RelatedRRID = RelatedRRID + def get_RestoreEnabled(self): + return self.RestoreEnabled + def set_RestoreEnabled(self, RestoreEnabled): + self.RestoreEnabled = RestoreEnabled + def get_RRAMenabled(self): + return self.RRAMenabled + def set_RRAMenabled(self, RRAMenabled): + self.RRAMenabled = RRAMenabled + def get_RreEnabled(self): + return self.RreEnabled + def set_RreEnabled(self, RreEnabled): + self.RreEnabled = RreEnabled + def get_Service(self): + return self.Service + def set_Service(self, Service): + self.Service = Service + def get_ServiceTypeCode(self): + return self.ServiceTypeCode + def set_ServiceTypeCode(self, ServiceTypeCode): + self.ServiceTypeCode = ServiceTypeCode + def get_Status(self): + return self.Status + def set_Status(self, Status): + self.Status = Status + def get_StatusCategory(self): + return self.StatusCategory + def set_StatusCategory(self, StatusCategory): + self.StatusCategory = StatusCategory + def get_StatusSummary(self): + return self.StatusSummary + def set_StatusSummary(self, StatusSummary): + self.StatusSummary = StatusSummary + def get_TABLECODE(self): + return self.TABLECODE + def set_TABLECODE(self, TABLECODE): + self.TABLECODE = TABLECODE + def get_TpodEnabled(self): + return self.TpodEnabled + def set_TpodEnabled(self, TpodEnabled): + self.TpodEnabled = TpodEnabled + def get_ValueofArticle(self): + return self.ValueofArticle + def set_ValueofArticle(self, ValueofArticle): + self.ValueofArticle = ValueofArticle + def get_EnabledNotificationRequests(self): + return self.EnabledNotificationRequests + def set_EnabledNotificationRequests(self, EnabledNotificationRequests): + self.EnabledNotificationRequests = EnabledNotificationRequests + def get_TrackSummary(self): + return self.TrackSummary + def set_TrackSummary(self, TrackSummary): + self.TrackSummary = TrackSummary + def get_TrackDetail(self): + return self.TrackDetail + def set_TrackDetail(self, TrackDetail): + self.TrackDetail = TrackDetail + def add_TrackDetail(self, value): + self.TrackDetail.append(value) + def insert_TrackDetail_at(self, index, value): + self.TrackDetail.insert(index, value) + def replace_TrackDetail_at(self, index, value): + self.TrackDetail[index] = value + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.AdditionalInfo is not None or + self.ADPScripting is not None or + 
self.ARCHDATA is not None or + self.ArchiveRestoreInfo is not None or + self.AssociatedLabel is not None or + self.Class is not None or + self.ClassOfMailCode is not None or + self.DeliveryNotificationDate is not None or + self.DestinationCity is not None or + self.DestinationCountryCode is not None or + self.DestinationState is not None or + self.DestinationZip is not None or + self.EditedLabelID is not None or + self.EmailEnabled is not None or + self.ExpectedDeliveryDate is not None or + self.ExpectedDeliveryTime is not None or + self.GuaranteedDeliveryDate is not None or + self.GuaranteedDeliveryTime is not None or + self.GuaranteedDetails is not None or + self.KahalaIndicator is not None or + self.MailTypeCode is not None or + self.MPDATE is not None or + self.MPSUFFIX is not None or + self.OriginCity is not None or + self.OriginCountryCode is not None or + self.OriginState is not None or + self.OriginZip is not None or + self.PodEnabled is not None or + self.PredictedDeliveryDate is not None or + self.PredictedDeliveryTime is not None or + self.PDWStart is not None or + self.PDWEnd is not None or + self.RelatedRRID is not None or + self.RestoreEnabled is not None or + self.RRAMenabled is not None or + self.RreEnabled is not None or + self.Service is not None or + self.ServiceTypeCode is not None or + self.Status is not None or + self.StatusCategory is not None or + self.StatusSummary is not None or + self.TABLECODE is not None or + self.TpodEnabled is not None or + self.ValueofArticle is not None or + self.EnabledNotificationRequests is not None or + self.TrackSummary is not None or + self.TrackDetail + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackInfoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackInfoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackInfoType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackInfoType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackInfoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackInfoType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ID), input_name='ID')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackInfoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.AdditionalInfo is not None: + namespaceprefix_ = self.AdditionalInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAdditionalInfo>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.AdditionalInfo), input_name='AdditionalInfo')), namespaceprefix_ , eol_)) + if self.ADPScripting is not None: + namespaceprefix_ = self.ADPScripting_nsprefix_ + ':' if (UseCapturedNS_ and self.ADPScripting_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sADPScripting>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ADPScripting), input_name='ADPScripting')), namespaceprefix_ , eol_)) + if self.ARCHDATA is not None: + namespaceprefix_ = self.ARCHDATA_nsprefix_ + ':' if (UseCapturedNS_ and self.ARCHDATA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sARCHDATA>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ARCHDATA, input_name='ARCHDATA'), namespaceprefix_ , eol_)) + if self.ArchiveRestoreInfo is not None: + namespaceprefix_ = self.ArchiveRestoreInfo_nsprefix_ + ':' if (UseCapturedNS_ and self.ArchiveRestoreInfo_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sArchiveRestoreInfo>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ArchiveRestoreInfo), input_name='ArchiveRestoreInfo')), namespaceprefix_ , eol_)) + if self.AssociatedLabel is not None: + namespaceprefix_ = self.AssociatedLabel_nsprefix_ + ':' if (UseCapturedNS_ and self.AssociatedLabel_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAssociatedLabel>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AssociatedLabel), input_name='AssociatedLabel')), namespaceprefix_ , eol_)) + if self.Class is not None: + namespaceprefix_ = self.Class_nsprefix_ + ':' if (UseCapturedNS_ and self.Class_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClass>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Class), input_name='Class')), namespaceprefix_ , eol_)) + if self.ClassOfMailCode is not None: + namespaceprefix_ = self.ClassOfMailCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ClassOfMailCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sClassOfMailCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClassOfMailCode), input_name='ClassOfMailCode')), namespaceprefix_ , eol_)) + if self.DeliveryNotificationDate is not None: + namespaceprefix_ = self.DeliveryNotificationDate_nsprefix_ + ':' if (UseCapturedNS_ and self.DeliveryNotificationDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDeliveryNotificationDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DeliveryNotificationDate), input_name='DeliveryNotificationDate')), namespaceprefix_ , eol_)) + if self.DestinationCity is not None: + namespaceprefix_ = self.DestinationCity_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationCity), input_name='DestinationCity')), namespaceprefix_ , eol_)) + if self.DestinationCountryCode is not None: + namespaceprefix_ = self.DestinationCountryCode_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationCountryCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationCountryCode>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.DestinationCountryCode), input_name='DestinationCountryCode')), namespaceprefix_ , eol_)) + if self.DestinationState is not None: + namespaceprefix_ = self.DestinationState_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DestinationState), input_name='DestinationState')), namespaceprefix_ , eol_)) + if self.DestinationZip is not None: + namespaceprefix_ = self.DestinationZip_nsprefix_ + ':' if (UseCapturedNS_ and self.DestinationZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDestinationZip>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.DestinationZip, input_name='DestinationZip'), namespaceprefix_ , eol_)) + if self.EditedLabelID is not None: + namespaceprefix_ = self.EditedLabelID_nsprefix_ + ':' if (UseCapturedNS_ and self.EditedLabelID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEditedLabelID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EditedLabelID), input_name='EditedLabelID')), namespaceprefix_ , eol_)) + if self.EmailEnabled is not None: + namespaceprefix_ = self.EmailEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.EmailEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmailEnabled>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailEnabled), input_name='EmailEnabled')), namespaceprefix_ , eol_)) + if self.ExpectedDeliveryDate is not None: + namespaceprefix_ = self.ExpectedDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpectedDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExpectedDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExpectedDeliveryDate), input_name='ExpectedDeliveryDate')), namespaceprefix_ , eol_)) + if self.ExpectedDeliveryTime is not None: + namespaceprefix_ = self.ExpectedDeliveryTime_nsprefix_ + ':' if (UseCapturedNS_ and self.ExpectedDeliveryTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sExpectedDeliveryTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ExpectedDeliveryTime), input_name='ExpectedDeliveryTime')), namespaceprefix_ , eol_)) + if self.GuaranteedDeliveryDate is not None: + namespaceprefix_ = self.GuaranteedDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteedDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteedDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteedDeliveryDate), input_name='GuaranteedDeliveryDate')), namespaceprefix_ , eol_)) + if self.GuaranteedDeliveryTime is not None: + namespaceprefix_ = self.GuaranteedDeliveryTime_nsprefix_ + ':' if (UseCapturedNS_ and self.GuaranteedDeliveryTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteedDeliveryTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteedDeliveryTime), input_name='GuaranteedDeliveryTime')), namespaceprefix_ , eol_)) + if self.GuaranteedDetails is not None: + namespaceprefix_ = self.GuaranteedDetails_nsprefix_ + ':' if (UseCapturedNS_ and 
self.GuaranteedDetails_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGuaranteedDetails>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GuaranteedDetails), input_name='GuaranteedDetails')), namespaceprefix_ , eol_)) + if self.KahalaIndicator is not None: + namespaceprefix_ = self.KahalaIndicator_nsprefix_ + ':' if (UseCapturedNS_ and self.KahalaIndicator_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sKahalaIndicator>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.KahalaIndicator), input_name='KahalaIndicator')), namespaceprefix_ , eol_)) + if self.MailTypeCode is not None: + namespaceprefix_ = self.MailTypeCode_nsprefix_ + ':' if (UseCapturedNS_ and self.MailTypeCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMailTypeCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MailTypeCode), input_name='MailTypeCode')), namespaceprefix_ , eol_)) + if self.MPDATE is not None: + namespaceprefix_ = self.MPDATE_nsprefix_ + ':' if (UseCapturedNS_ and self.MPDATE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMPDATE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MPDATE), input_name='MPDATE')), namespaceprefix_ , eol_)) + if self.MPSUFFIX is not None: + namespaceprefix_ = self.MPSUFFIX_nsprefix_ + ':' if (UseCapturedNS_ and self.MPSUFFIX_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sMPSUFFIX>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MPSUFFIX, input_name='MPSUFFIX'), namespaceprefix_ , eol_)) + if self.OriginCity is not None: + namespaceprefix_ = self.OriginCity_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginCity), input_name='OriginCity')), namespaceprefix_ , eol_)) + if self.OriginCountryCode is not None: + namespaceprefix_ = self.OriginCountryCode_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginCountryCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginCountryCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginCountryCode), input_name='OriginCountryCode')), namespaceprefix_ , eol_)) + if self.OriginState is not None: + namespaceprefix_ = self.OriginState_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginState), input_name='OriginState')), namespaceprefix_ , eol_)) + if self.OriginZip is not None: + namespaceprefix_ = self.OriginZip_nsprefix_ + ':' if (UseCapturedNS_ and self.OriginZip_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOriginZip>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OriginZip), input_name='OriginZip')), namespaceprefix_ , eol_)) + if self.PodEnabled is not None: + namespaceprefix_ = self.PodEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.PodEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPodEnabled>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.PodEnabled), input_name='PodEnabled')), namespaceprefix_ , eol_)) + if self.PredictedDeliveryDate is not None: + namespaceprefix_ = self.PredictedDeliveryDate_nsprefix_ + ':' if (UseCapturedNS_ and self.PredictedDeliveryDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPredictedDeliveryDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PredictedDeliveryDate), input_name='PredictedDeliveryDate')), namespaceprefix_ , eol_)) + if self.PredictedDeliveryTime is not None: + namespaceprefix_ = self.PredictedDeliveryTime_nsprefix_ + ':' if (UseCapturedNS_ and self.PredictedDeliveryTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPredictedDeliveryTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PredictedDeliveryTime), input_name='PredictedDeliveryTime')), namespaceprefix_ , eol_)) + if self.PDWStart is not None: + namespaceprefix_ = self.PDWStart_nsprefix_ + ':' if (UseCapturedNS_ and self.PDWStart_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDWStart>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDWStart), input_name='PDWStart')), namespaceprefix_ , eol_)) + if self.PDWEnd is not None: + namespaceprefix_ = self.PDWEnd_nsprefix_ + ':' if (UseCapturedNS_ and self.PDWEnd_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sPDWEnd>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PDWEnd), input_name='PDWEnd')), namespaceprefix_ , eol_)) + if self.RelatedRRID is not None: + namespaceprefix_ = self.RelatedRRID_nsprefix_ + ':' if (UseCapturedNS_ and self.RelatedRRID_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRelatedRRID>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RelatedRRID), input_name='RelatedRRID')), namespaceprefix_ , eol_)) + if self.RestoreEnabled is not None: + namespaceprefix_ = self.RestoreEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.RestoreEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRestoreEnabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.RestoreEnabled, input_name='RestoreEnabled'), namespaceprefix_ , eol_)) + if self.RRAMenabled is not None: + namespaceprefix_ = self.RRAMenabled_nsprefix_ + ':' if (UseCapturedNS_ and self.RRAMenabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRRAMenabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.RRAMenabled, input_name='RRAMenabled'), namespaceprefix_ , eol_)) + if self.RreEnabled is not None: + namespaceprefix_ = self.RreEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.RreEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sRreEnabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.RreEnabled, input_name='RreEnabled'), namespaceprefix_ , eol_)) + if self.Service is not None: + namespaceprefix_ = self.Service_nsprefix_ + ':' if (UseCapturedNS_ and self.Service_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sService>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Service), input_name='Service')), namespaceprefix_ , eol_)) + if self.ServiceTypeCode is not None: + namespaceprefix_ = self.ServiceTypeCode_nsprefix_ + ':' 
if (UseCapturedNS_ and self.ServiceTypeCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sServiceTypeCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceTypeCode), input_name='ServiceTypeCode')), namespaceprefix_ , eol_)) + if self.Status is not None: + namespaceprefix_ = self.Status_nsprefix_ + ':' if (UseCapturedNS_ and self.Status_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatus>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')), namespaceprefix_ , eol_)) + if self.StatusCategory is not None: + namespaceprefix_ = self.StatusCategory_nsprefix_ + ':' if (UseCapturedNS_ and self.StatusCategory_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatusCategory>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusCategory), input_name='StatusCategory')), namespaceprefix_ , eol_)) + if self.StatusSummary is not None: + namespaceprefix_ = self.StatusSummary_nsprefix_ + ':' if (UseCapturedNS_ and self.StatusSummary_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatusSummary>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusSummary), input_name='StatusSummary')), namespaceprefix_ , eol_)) + if self.TABLECODE is not None: + namespaceprefix_ = self.TABLECODE_nsprefix_ + ':' if (UseCapturedNS_ and self.TABLECODE_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTABLECODE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TABLECODE), input_name='TABLECODE')), namespaceprefix_ , eol_)) + if self.TpodEnabled is not None: + namespaceprefix_ = self.TpodEnabled_nsprefix_ + ':' if (UseCapturedNS_ and self.TpodEnabled_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTpodEnabled>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.TpodEnabled, input_name='TpodEnabled'), namespaceprefix_ , eol_)) + if self.ValueofArticle is not None: + namespaceprefix_ = self.ValueofArticle_nsprefix_ + ':' if (UseCapturedNS_ and self.ValueofArticle_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sValueofArticle>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValueofArticle), input_name='ValueofArticle')), namespaceprefix_ , eol_)) + if self.EnabledNotificationRequests is not None: + namespaceprefix_ = self.EnabledNotificationRequests_nsprefix_ + ':' if (UseCapturedNS_ and self.EnabledNotificationRequests_nsprefix_) else '' + self.EnabledNotificationRequests.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EnabledNotificationRequests', pretty_print=pretty_print) + if self.TrackSummary is not None: + namespaceprefix_ = self.TrackSummary_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackSummary_nsprefix_) else '' + self.TrackSummary.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackSummary', pretty_print=pretty_print) + for TrackDetail_ in self.TrackDetail: + namespaceprefix_ = self.TrackDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackDetail_nsprefix_) else '' + TrackDetail_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TrackDetail', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + 
self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'AdditionalInfo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AdditionalInfo') + value_ = self.gds_validate_string(value_, node, 'AdditionalInfo') + self.AdditionalInfo = value_ + self.AdditionalInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'ADPScripting': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ADPScripting') + value_ = self.gds_validate_string(value_, node, 'ADPScripting') + self.ADPScripting = value_ + self.ADPScripting_nsprefix_ = child_.prefix + elif nodeName_ == 'ARCHDATA': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'ARCHDATA') + ival_ = self.gds_validate_boolean(ival_, node, 'ARCHDATA') + self.ARCHDATA = ival_ + self.ARCHDATA_nsprefix_ = child_.prefix + elif nodeName_ == 'ArchiveRestoreInfo': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ArchiveRestoreInfo') + value_ = self.gds_validate_string(value_, node, 'ArchiveRestoreInfo') + self.ArchiveRestoreInfo = value_ + self.ArchiveRestoreInfo_nsprefix_ = child_.prefix + elif nodeName_ == 'AssociatedLabel': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'AssociatedLabel') + value_ = self.gds_validate_string(value_, node, 'AssociatedLabel') + self.AssociatedLabel = value_ + self.AssociatedLabel_nsprefix_ = child_.prefix + elif nodeName_ == 'Class': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Class') + value_ = self.gds_validate_string(value_, node, 'Class') + self.Class = value_ + self.Class_nsprefix_ = child_.prefix + elif nodeName_ == 'ClassOfMailCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ClassOfMailCode') + value_ = self.gds_validate_string(value_, node, 'ClassOfMailCode') + self.ClassOfMailCode = value_ + self.ClassOfMailCode_nsprefix_ = child_.prefix + elif nodeName_ == 'DeliveryNotificationDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DeliveryNotificationDate') + value_ = self.gds_validate_string(value_, node, 'DeliveryNotificationDate') + self.DeliveryNotificationDate = value_ + self.DeliveryNotificationDate_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationCity') + value_ = self.gds_validate_string(value_, node, 'DestinationCity') + self.DestinationCity = value_ + self.DestinationCity_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationCountryCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationCountryCode') + value_ = self.gds_validate_string(value_, node, 'DestinationCountryCode') + self.DestinationCountryCode = value_ + self.DestinationCountryCode_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'DestinationState') + value_ = 
self.gds_validate_string(value_, node, 'DestinationState') + self.DestinationState = value_ + self.DestinationState_nsprefix_ = child_.prefix + elif nodeName_ == 'DestinationZip' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'DestinationZip') + ival_ = self.gds_validate_integer(ival_, node, 'DestinationZip') + self.DestinationZip = ival_ + self.DestinationZip_nsprefix_ = child_.prefix + elif nodeName_ == 'EditedLabelID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EditedLabelID') + value_ = self.gds_validate_string(value_, node, 'EditedLabelID') + self.EditedLabelID = value_ + self.EditedLabelID_nsprefix_ = child_.prefix + elif nodeName_ == 'EmailEnabled': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EmailEnabled') + value_ = self.gds_validate_string(value_, node, 'EmailEnabled') + self.EmailEnabled = value_ + self.EmailEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpectedDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExpectedDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'ExpectedDeliveryDate') + self.ExpectedDeliveryDate = value_ + self.ExpectedDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'ExpectedDeliveryTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ExpectedDeliveryTime') + value_ = self.gds_validate_string(value_, node, 'ExpectedDeliveryTime') + self.ExpectedDeliveryTime = value_ + self.ExpectedDeliveryTime_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteedDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteedDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'GuaranteedDeliveryDate') + self.GuaranteedDeliveryDate = value_ + self.GuaranteedDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteedDeliveryTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteedDeliveryTime') + value_ = self.gds_validate_string(value_, node, 'GuaranteedDeliveryTime') + self.GuaranteedDeliveryTime = value_ + self.GuaranteedDeliveryTime_nsprefix_ = child_.prefix + elif nodeName_ == 'GuaranteedDetails': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'GuaranteedDetails') + value_ = self.gds_validate_string(value_, node, 'GuaranteedDetails') + self.GuaranteedDetails = value_ + self.GuaranteedDetails_nsprefix_ = child_.prefix + elif nodeName_ == 'KahalaIndicator': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'KahalaIndicator') + value_ = self.gds_validate_string(value_, node, 'KahalaIndicator') + self.KahalaIndicator = value_ + self.KahalaIndicator_nsprefix_ = child_.prefix + elif nodeName_ == 'MailTypeCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MailTypeCode') + value_ = self.gds_validate_string(value_, node, 'MailTypeCode') + self.MailTypeCode = value_ + self.MailTypeCode_nsprefix_ = child_.prefix + elif nodeName_ == 'MPDATE': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'MPDATE') + value_ = self.gds_validate_string(value_, node, 'MPDATE') + self.MPDATE = value_ + self.MPDATE_nsprefix_ = child_.prefix + elif nodeName_ == 'MPSUFFIX' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'MPSUFFIX') + ival_ = self.gds_validate_integer(ival_, node, 'MPSUFFIX') + self.MPSUFFIX = ival_ + self.MPSUFFIX_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCity': + value_ = 
child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCity') + value_ = self.gds_validate_string(value_, node, 'OriginCity') + self.OriginCity = value_ + self.OriginCity_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginCountryCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginCountryCode') + value_ = self.gds_validate_string(value_, node, 'OriginCountryCode') + self.OriginCountryCode = value_ + self.OriginCountryCode_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginState') + value_ = self.gds_validate_string(value_, node, 'OriginState') + self.OriginState = value_ + self.OriginState_nsprefix_ = child_.prefix + elif nodeName_ == 'OriginZip': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'OriginZip') + value_ = self.gds_validate_string(value_, node, 'OriginZip') + self.OriginZip = value_ + self.OriginZip_nsprefix_ = child_.prefix + elif nodeName_ == 'PodEnabled': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PodEnabled') + value_ = self.gds_validate_string(value_, node, 'PodEnabled') + self.PodEnabled = value_ + self.PodEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'PredictedDeliveryDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PredictedDeliveryDate') + value_ = self.gds_validate_string(value_, node, 'PredictedDeliveryDate') + self.PredictedDeliveryDate = value_ + self.PredictedDeliveryDate_nsprefix_ = child_.prefix + elif nodeName_ == 'PredictedDeliveryTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PredictedDeliveryTime') + value_ = self.gds_validate_string(value_, node, 'PredictedDeliveryTime') + self.PredictedDeliveryTime = value_ + self.PredictedDeliveryTime_nsprefix_ = child_.prefix + elif nodeName_ == 'PDWStart': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDWStart') + value_ = self.gds_validate_string(value_, node, 'PDWStart') + self.PDWStart = value_ + self.PDWStart_nsprefix_ = child_.prefix + elif nodeName_ == 'PDWEnd': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'PDWEnd') + value_ = self.gds_validate_string(value_, node, 'PDWEnd') + self.PDWEnd = value_ + self.PDWEnd_nsprefix_ = child_.prefix + elif nodeName_ == 'RelatedRRID': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'RelatedRRID') + value_ = self.gds_validate_string(value_, node, 'RelatedRRID') + self.RelatedRRID = value_ + self.RelatedRRID_nsprefix_ = child_.prefix + elif nodeName_ == 'RestoreEnabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'RestoreEnabled') + ival_ = self.gds_validate_boolean(ival_, node, 'RestoreEnabled') + self.RestoreEnabled = ival_ + self.RestoreEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'RRAMenabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'RRAMenabled') + ival_ = self.gds_validate_boolean(ival_, node, 'RRAMenabled') + self.RRAMenabled = ival_ + self.RRAMenabled_nsprefix_ = child_.prefix + elif nodeName_ == 'RreEnabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'RreEnabled') + ival_ = self.gds_validate_boolean(ival_, node, 'RreEnabled') + self.RreEnabled = ival_ + self.RreEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'Service': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Service') + value_ = self.gds_validate_string(value_, node, 'Service') + 
self.Service = value_ + self.Service_nsprefix_ = child_.prefix + elif nodeName_ == 'ServiceTypeCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ServiceTypeCode') + value_ = self.gds_validate_string(value_, node, 'ServiceTypeCode') + self.ServiceTypeCode = value_ + self.ServiceTypeCode_nsprefix_ = child_.prefix + elif nodeName_ == 'Status': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Status') + value_ = self.gds_validate_string(value_, node, 'Status') + self.Status = value_ + self.Status_nsprefix_ = child_.prefix + elif nodeName_ == 'StatusCategory': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'StatusCategory') + value_ = self.gds_validate_string(value_, node, 'StatusCategory') + self.StatusCategory = value_ + self.StatusCategory_nsprefix_ = child_.prefix + elif nodeName_ == 'StatusSummary': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'StatusSummary') + value_ = self.gds_validate_string(value_, node, 'StatusSummary') + self.StatusSummary = value_ + self.StatusSummary_nsprefix_ = child_.prefix + elif nodeName_ == 'TABLECODE': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'TABLECODE') + value_ = self.gds_validate_string(value_, node, 'TABLECODE') + self.TABLECODE = value_ + self.TABLECODE_nsprefix_ = child_.prefix + elif nodeName_ == 'TpodEnabled': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'TpodEnabled') + ival_ = self.gds_validate_boolean(ival_, node, 'TpodEnabled') + self.TpodEnabled = ival_ + self.TpodEnabled_nsprefix_ = child_.prefix + elif nodeName_ == 'ValueofArticle': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ValueofArticle') + value_ = self.gds_validate_string(value_, node, 'ValueofArticle') + self.ValueofArticle = value_ + self.ValueofArticle_nsprefix_ = child_.prefix + elif nodeName_ == 'EnabledNotificationRequests': + obj_ = EnabledNotificationRequestsType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.EnabledNotificationRequests = obj_ + obj_.original_tagname_ = 'EnabledNotificationRequests' + elif nodeName_ == 'TrackSummary': + obj_ = TrackSummaryType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackSummary = obj_ + obj_.original_tagname_ = 'TrackSummary' + elif nodeName_ == 'TrackDetail': + obj_ = TrackDetailType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.TrackDetail.append(obj_) + obj_.original_tagname_ = 'TrackDetail' +# end class TrackInfoType + + +class EnabledNotificationRequestsType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, SMS=None, EMAIL=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.SMS = SMS + self.SMS_nsprefix_ = None + self.EMAIL = EMAIL + self.EMAIL_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, EnabledNotificationRequestsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EnabledNotificationRequestsType.subclass: + return EnabledNotificationRequestsType.subclass(*args_, **kwargs_) + else: + return EnabledNotificationRequestsType(*args_, **kwargs_) + factory = 
staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_SMS(self): + return self.SMS + def set_SMS(self, SMS): + self.SMS = SMS + def get_EMAIL(self): + return self.EMAIL + def set_EMAIL(self, EMAIL): + self.EMAIL = EMAIL + def has__content(self): + if ( + self.SMS is not None or + self.EMAIL is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EnabledNotificationRequestsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('EnabledNotificationRequestsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'EnabledNotificationRequestsType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EnabledNotificationRequestsType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EnabledNotificationRequestsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EnabledNotificationRequestsType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EnabledNotificationRequestsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SMS is not None: + namespaceprefix_ = self.SMS_nsprefix_ + ':' if (UseCapturedNS_ and self.SMS_nsprefix_) else '' + self.SMS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SMS', pretty_print=pretty_print) + if self.EMAIL is not None: + namespaceprefix_ = self.EMAIL_nsprefix_ + ':' if (UseCapturedNS_ and self.EMAIL_nsprefix_) else '' + self.EMAIL.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EMAIL', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'SMS': + obj_ = SMSType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.SMS = obj_ + obj_.original_tagname_ = 'SMS' + elif nodeName_ == 'EMAIL': + obj_ = EMAILType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.EMAIL = obj_ + obj_.original_tagname_ = 'EMAIL' +# end class EnabledNotificationRequestsType + + +class SMSType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass 
= None + superclass = None + def __init__(self, FD=None, AL=None, TD=None, UP=None, DND=None, FS=None, OA=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FD = FD + self.FD_nsprefix_ = None + self.AL = AL + self.AL_nsprefix_ = None + self.TD = TD + self.TD_nsprefix_ = None + self.UP = UP + self.UP_nsprefix_ = None + self.DND = DND + self.DND_nsprefix_ = None + self.FS = FS + self.FS_nsprefix_ = None + self.OA = OA + self.OA_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SMSType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SMSType.subclass: + return SMSType.subclass(*args_, **kwargs_) + else: + return SMSType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FD(self): + return self.FD + def set_FD(self, FD): + self.FD = FD + def get_AL(self): + return self.AL + def set_AL(self, AL): + self.AL = AL + def get_TD(self): + return self.TD + def set_TD(self, TD): + self.TD = TD + def get_UP(self): + return self.UP + def set_UP(self, UP): + self.UP = UP + def get_DND(self): + return self.DND + def set_DND(self, DND): + self.DND = DND + def get_FS(self): + return self.FS + def set_FS(self, FS): + self.FS = FS + def get_OA(self): + return self.OA + def set_OA(self, OA): + self.OA = OA + def has__content(self): + if ( + self.FD is not None or + self.AL is not None or + self.TD is not None or + self.UP is not None or + self.DND is not None or + self.FS is not None or + self.OA is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SMSType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SMSType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'SMSType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SMSType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SMSType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SMSType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SMSType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FD is not None: + namespaceprefix_ = self.FD_nsprefix_ + ':' if (UseCapturedNS_ and self.FD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FD, input_name='FD'), namespaceprefix_ , 
eol_)) + if self.AL is not None: + namespaceprefix_ = self.AL_nsprefix_ + ':' if (UseCapturedNS_ and self.AL_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAL>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AL, input_name='AL'), namespaceprefix_ , eol_)) + if self.TD is not None: + namespaceprefix_ = self.TD_nsprefix_ + ':' if (UseCapturedNS_ and self.TD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.TD, input_name='TD'), namespaceprefix_ , eol_)) + if self.UP is not None: + namespaceprefix_ = self.UP_nsprefix_ + ':' if (UseCapturedNS_ and self.UP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUP>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.UP, input_name='UP'), namespaceprefix_ , eol_)) + if self.DND is not None: + namespaceprefix_ = self.DND_nsprefix_ + ':' if (UseCapturedNS_ and self.DND_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDND>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DND, input_name='DND'), namespaceprefix_ , eol_)) + if self.FS is not None: + namespaceprefix_ = self.FS_nsprefix_ + ':' if (UseCapturedNS_ and self.FS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFS>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FS, input_name='FS'), namespaceprefix_ , eol_)) + if self.OA is not None: + namespaceprefix_ = self.OA_nsprefix_ + ':' if (UseCapturedNS_ and self.OA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOA>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OA, input_name='OA'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FD') + ival_ = self.gds_validate_boolean(ival_, node, 'FD') + self.FD = ival_ + self.FD_nsprefix_ = child_.prefix + elif nodeName_ == 'AL': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AL') + ival_ = self.gds_validate_boolean(ival_, node, 'AL') + self.AL = ival_ + self.AL_nsprefix_ = child_.prefix + elif nodeName_ == 'TD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'TD') + ival_ = self.gds_validate_boolean(ival_, node, 'TD') + self.TD = ival_ + self.TD_nsprefix_ = child_.prefix + elif nodeName_ == 'UP': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'UP') + ival_ = self.gds_validate_boolean(ival_, node, 'UP') + self.UP = ival_ + self.UP_nsprefix_ = child_.prefix + elif nodeName_ == 'DND': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DND') + ival_ = self.gds_validate_boolean(ival_, node, 'DND') + self.DND = ival_ + self.DND_nsprefix_ = child_.prefix + elif nodeName_ == 'FS': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FS') + ival_ = 
self.gds_validate_boolean(ival_, node, 'FS') + self.FS = ival_ + self.FS_nsprefix_ = child_.prefix + elif nodeName_ == 'OA': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OA') + ival_ = self.gds_validate_boolean(ival_, node, 'OA') + self.OA = ival_ + self.OA_nsprefix_ = child_.prefix +# end class SMSType + + +class EMAILType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, FD=None, AL=None, TD=None, UP=None, DND=None, FS=None, OA=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.FD = FD + self.FD_nsprefix_ = None + self.AL = AL + self.AL_nsprefix_ = None + self.TD = TD + self.TD_nsprefix_ = None + self.UP = UP + self.UP_nsprefix_ = None + self.DND = DND + self.DND_nsprefix_ = None + self.FS = FS + self.FS_nsprefix_ = None + self.OA = OA + self.OA_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, EMAILType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if EMAILType.subclass: + return EMAILType.subclass(*args_, **kwargs_) + else: + return EMAILType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_FD(self): + return self.FD + def set_FD(self, FD): + self.FD = FD + def get_AL(self): + return self.AL + def set_AL(self, AL): + self.AL = AL + def get_TD(self): + return self.TD + def set_TD(self, TD): + self.TD = TD + def get_UP(self): + return self.UP + def set_UP(self, UP): + self.UP = UP + def get_DND(self): + return self.DND + def set_DND(self, DND): + self.DND = DND + def get_FS(self): + return self.FS + def set_FS(self, FS): + self.FS = FS + def get_OA(self): + return self.OA + def set_OA(self, OA): + self.OA = OA + def has__content(self): + if ( + self.FD is not None or + self.AL is not None or + self.TD is not None or + self.UP is not None or + self.DND is not None or + self.FS is not None or + self.OA is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EMAILType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('EMAILType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'EMAILType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EMAILType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EMAILType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EMAILType'): + pass + def _exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='EMAILType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.FD is not None: + namespaceprefix_ = self.FD_nsprefix_ + ':' if (UseCapturedNS_ and self.FD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FD, input_name='FD'), namespaceprefix_ , eol_)) + if self.AL is not None: + namespaceprefix_ = self.AL_nsprefix_ + ':' if (UseCapturedNS_ and self.AL_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAL>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AL, input_name='AL'), namespaceprefix_ , eol_)) + if self.TD is not None: + namespaceprefix_ = self.TD_nsprefix_ + ':' if (UseCapturedNS_ and self.TD_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sTD>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.TD, input_name='TD'), namespaceprefix_ , eol_)) + if self.UP is not None: + namespaceprefix_ = self.UP_nsprefix_ + ':' if (UseCapturedNS_ and self.UP_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUP>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.UP, input_name='UP'), namespaceprefix_ , eol_)) + if self.DND is not None: + namespaceprefix_ = self.DND_nsprefix_ + ':' if (UseCapturedNS_ and self.DND_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sDND>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DND, input_name='DND'), namespaceprefix_ , eol_)) + if self.FS is not None: + namespaceprefix_ = self.FS_nsprefix_ + ':' if (UseCapturedNS_ and self.FS_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFS>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.FS, input_name='FS'), namespaceprefix_ , eol_)) + if self.OA is not None: + namespaceprefix_ = self.OA_nsprefix_ + ':' if (UseCapturedNS_ and self.OA_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sOA>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.OA, input_name='OA'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'FD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FD') + ival_ = self.gds_validate_boolean(ival_, node, 'FD') + self.FD = ival_ + self.FD_nsprefix_ = child_.prefix + elif nodeName_ == 'AL': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AL') + ival_ = self.gds_validate_boolean(ival_, node, 'AL') + self.AL = ival_ + self.AL_nsprefix_ = child_.prefix + elif nodeName_ == 'TD': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'TD') + ival_ = self.gds_validate_boolean(ival_, node, 'TD') + self.TD = ival_ + self.TD_nsprefix_ = child_.prefix + elif nodeName_ == 'UP': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'UP') + ival_ = 
self.gds_validate_boolean(ival_, node, 'UP') + self.UP = ival_ + self.UP_nsprefix_ = child_.prefix + elif nodeName_ == 'DND': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'DND') + ival_ = self.gds_validate_boolean(ival_, node, 'DND') + self.DND = ival_ + self.DND_nsprefix_ = child_.prefix + elif nodeName_ == 'FS': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'FS') + ival_ = self.gds_validate_boolean(ival_, node, 'FS') + self.FS = ival_ + self.FS_nsprefix_ = child_.prefix + elif nodeName_ == 'OA': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'OA') + ival_ = self.gds_validate_boolean(ival_, node, 'OA') + self.OA = ival_ + self.OA_nsprefix_ = child_.prefix +# end class EMAILType + + +class TrackSummaryType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, EventTime=None, EventDate=None, Event=None, EventCity=None, EventState=None, EventZIPCode=None, EventCountry=None, FirmName=None, Name=None, AuthorizedAgent=None, EventCode=None, ActionCode=None, ReasonCode=None, GeoCertified=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.EventTime = EventTime + self.EventTime_nsprefix_ = None + self.EventDate = EventDate + self.EventDate_nsprefix_ = None + self.Event = Event + self.Event_nsprefix_ = None + self.EventCity = EventCity + self.EventCity_nsprefix_ = None + self.EventState = EventState + self.EventState_nsprefix_ = None + self.EventZIPCode = EventZIPCode + self.EventZIPCode_nsprefix_ = None + self.EventCountry = EventCountry + self.EventCountry_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Name = Name + self.Name_nsprefix_ = None + self.AuthorizedAgent = AuthorizedAgent + self.AuthorizedAgent_nsprefix_ = None + self.EventCode = EventCode + self.EventCode_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.ReasonCode = ReasonCode + self.ReasonCode_nsprefix_ = None + self.GeoCertified = GeoCertified + self.GeoCertified_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackSummaryType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackSummaryType.subclass: + return TrackSummaryType.subclass(*args_, **kwargs_) + else: + return TrackSummaryType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EventTime(self): + return self.EventTime + def set_EventTime(self, EventTime): + self.EventTime = EventTime + def get_EventDate(self): + return self.EventDate + def set_EventDate(self, EventDate): + self.EventDate = EventDate + def get_Event(self): + return self.Event + def set_Event(self, Event): + self.Event = Event + def get_EventCity(self): + return self.EventCity + def set_EventCity(self, EventCity): + self.EventCity = EventCity + def get_EventState(self): + return self.EventState + def set_EventState(self, EventState): + self.EventState = EventState + def get_EventZIPCode(self): + return self.EventZIPCode + def set_EventZIPCode(self, EventZIPCode): + self.EventZIPCode = EventZIPCode + def get_EventCountry(self): + return self.EventCountry + def 
set_EventCountry(self, EventCountry): + self.EventCountry = EventCountry + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Name(self): + return self.Name + def set_Name(self, Name): + self.Name = Name + def get_AuthorizedAgent(self): + return self.AuthorizedAgent + def set_AuthorizedAgent(self, AuthorizedAgent): + self.AuthorizedAgent = AuthorizedAgent + def get_EventCode(self): + return self.EventCode + def set_EventCode(self, EventCode): + self.EventCode = EventCode + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_ReasonCode(self): + return self.ReasonCode + def set_ReasonCode(self, ReasonCode): + self.ReasonCode = ReasonCode + def get_GeoCertified(self): + return self.GeoCertified + def set_GeoCertified(self, GeoCertified): + self.GeoCertified = GeoCertified + def has__content(self): + if ( + self.EventTime is not None or + self.EventDate is not None or + self.Event is not None or + self.EventCity is not None or + self.EventState is not None or + self.EventZIPCode is not None or + self.EventCountry is not None or + self.FirmName is not None or + self.Name is not None or + self.AuthorizedAgent is not None or + self.EventCode is not None or + self.ActionCode is not None or + self.ReasonCode is not None or + self.GeoCertified is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackSummaryType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackSummaryType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackSummaryType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackSummaryType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackSummaryType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackSummaryType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackSummaryType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.EventTime is not None: + namespaceprefix_ = self.EventTime_nsprefix_ + ':' if (UseCapturedNS_ and self.EventTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventTime), input_name='EventTime')), namespaceprefix_ , eol_)) + if self.EventDate is not None: + namespaceprefix_ = self.EventDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EventDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventDate>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.EventDate), input_name='EventDate')), namespaceprefix_ , eol_)) + if self.Event is not None: + namespaceprefix_ = self.Event_nsprefix_ + ':' if (UseCapturedNS_ and self.Event_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEvent>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Event), input_name='Event')), namespaceprefix_ , eol_)) + if self.EventCity is not None: + namespaceprefix_ = self.EventCity_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCity), input_name='EventCity')), namespaceprefix_ , eol_)) + if self.EventState is not None: + namespaceprefix_ = self.EventState_nsprefix_ + ':' if (UseCapturedNS_ and self.EventState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventState), input_name='EventState')), namespaceprefix_ , eol_)) + if self.EventZIPCode is not None: + namespaceprefix_ = self.EventZIPCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventZIPCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventZIPCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.EventZIPCode, input_name='EventZIPCode'), namespaceprefix_ , eol_)) + if self.EventCountry is not None: + namespaceprefix_ = self.EventCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCountry), input_name='EventCountry')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.Name is not None: + namespaceprefix_ = self.Name_nsprefix_ + ':' if (UseCapturedNS_ and self.Name_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')), namespaceprefix_ , eol_)) + if self.AuthorizedAgent is not None: + namespaceprefix_ = self.AuthorizedAgent_nsprefix_ + ':' if (UseCapturedNS_ and self.AuthorizedAgent_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAuthorizedAgent>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.AuthorizedAgent, input_name='AuthorizedAgent'), namespaceprefix_ , eol_)) + if self.EventCode is not None: + namespaceprefix_ = self.EventCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCode), input_name='EventCode')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.ReasonCode is not None: + namespaceprefix_ = self.ReasonCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReasonCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReasonCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReasonCode), input_name='ReasonCode')), namespaceprefix_ , eol_)) + if self.GeoCertified is not None: + namespaceprefix_ = self.GeoCertified_nsprefix_ + ':' if (UseCapturedNS_ and self.GeoCertified_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGeoCertified>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.GeoCertified, input_name='GeoCertified'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'EventTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventTime') + value_ = self.gds_validate_string(value_, node, 'EventTime') + self.EventTime = value_ + self.EventTime_nsprefix_ = child_.prefix + elif nodeName_ == 'EventDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventDate') + value_ = self.gds_validate_string(value_, node, 'EventDate') + self.EventDate = value_ + self.EventDate_nsprefix_ = child_.prefix + elif nodeName_ == 'Event': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Event') + value_ = self.gds_validate_string(value_, node, 'Event') + self.Event = value_ + self.Event_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCity') + value_ = self.gds_validate_string(value_, node, 'EventCity') + self.EventCity = value_ + self.EventCity_nsprefix_ = child_.prefix + elif nodeName_ == 'EventState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventState') + value_ = self.gds_validate_string(value_, node, 'EventState') + self.EventState = value_ + self.EventState_nsprefix_ = child_.prefix + elif nodeName_ == 'EventZIPCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'EventZIPCode') + ival_ = self.gds_validate_integer(ival_, node, 'EventZIPCode') + self.EventZIPCode = ival_ + self.EventZIPCode_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCountry') + value_ = self.gds_validate_string(value_, node, 'EventCountry') + self.EventCountry = value_ + self.EventCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Name': + value_ = child_.text + 
value_ = self.gds_parse_string(value_, node, 'Name') + value_ = self.gds_validate_string(value_, node, 'Name') + self.Name = value_ + self.Name_nsprefix_ = child_.prefix + elif nodeName_ == 'AuthorizedAgent': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AuthorizedAgent') + ival_ = self.gds_validate_boolean(ival_, node, 'AuthorizedAgent') + self.AuthorizedAgent = ival_ + self.AuthorizedAgent_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCode') + value_ = self.gds_validate_string(value_, node, 'EventCode') + self.EventCode = value_ + self.EventCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ReasonCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReasonCode') + value_ = self.gds_validate_string(value_, node, 'ReasonCode') + self.ReasonCode = value_ + self.ReasonCode_nsprefix_ = child_.prefix + elif nodeName_ == 'GeoCertified': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'GeoCertified') + ival_ = self.gds_validate_boolean(ival_, node, 'GeoCertified') + self.GeoCertified = ival_ + self.GeoCertified_nsprefix_ = child_.prefix +# end class TrackSummaryType + + +class TrackDetailType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, EventTime=None, EventDate=None, Event=None, EventCity=None, EventState=None, EventZIPCode=None, EventCountry=None, FirmName=None, Name=None, AuthorizedAgent=None, GeoCertified=None, EventCode=None, ActionCode=None, ReasonCode=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.EventTime = EventTime + self.EventTime_nsprefix_ = None + self.EventDate = EventDate + self.EventDate_nsprefix_ = None + self.Event = Event + self.Event_nsprefix_ = None + self.EventCity = EventCity + self.EventCity_nsprefix_ = None + self.EventState = EventState + self.EventState_nsprefix_ = None + self.EventZIPCode = EventZIPCode + self.EventZIPCode_nsprefix_ = None + self.EventCountry = EventCountry + self.EventCountry_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.Name = Name + self.Name_nsprefix_ = None + self.AuthorizedAgent = AuthorizedAgent + self.AuthorizedAgent_nsprefix_ = None + self.GeoCertified = GeoCertified + self.GeoCertified_nsprefix_ = None + self.EventCode = EventCode + self.EventCode_nsprefix_ = None + self.ActionCode = ActionCode + self.ActionCode_nsprefix_ = None + self.ReasonCode = ReasonCode + self.ReasonCode_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TrackDetailType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TrackDetailType.subclass: + return TrackDetailType.subclass(*args_, **kwargs_) + else: + return TrackDetailType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_EventTime(self): + return self.EventTime + def 
set_EventTime(self, EventTime): + self.EventTime = EventTime + def get_EventDate(self): + return self.EventDate + def set_EventDate(self, EventDate): + self.EventDate = EventDate + def get_Event(self): + return self.Event + def set_Event(self, Event): + self.Event = Event + def get_EventCity(self): + return self.EventCity + def set_EventCity(self, EventCity): + self.EventCity = EventCity + def get_EventState(self): + return self.EventState + def set_EventState(self, EventState): + self.EventState = EventState + def get_EventZIPCode(self): + return self.EventZIPCode + def set_EventZIPCode(self, EventZIPCode): + self.EventZIPCode = EventZIPCode + def get_EventCountry(self): + return self.EventCountry + def set_EventCountry(self, EventCountry): + self.EventCountry = EventCountry + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_Name(self): + return self.Name + def set_Name(self, Name): + self.Name = Name + def get_AuthorizedAgent(self): + return self.AuthorizedAgent + def set_AuthorizedAgent(self, AuthorizedAgent): + self.AuthorizedAgent = AuthorizedAgent + def get_GeoCertified(self): + return self.GeoCertified + def set_GeoCertified(self, GeoCertified): + self.GeoCertified = GeoCertified + def get_EventCode(self): + return self.EventCode + def set_EventCode(self, EventCode): + self.EventCode = EventCode + def get_ActionCode(self): + return self.ActionCode + def set_ActionCode(self, ActionCode): + self.ActionCode = ActionCode + def get_ReasonCode(self): + return self.ReasonCode + def set_ReasonCode(self, ReasonCode): + self.ReasonCode = ReasonCode + def has__content(self): + if ( + self.EventTime is not None or + self.EventDate is not None or + self.Event is not None or + self.EventCity is not None or + self.EventState is not None or + self.EventZIPCode is not None or + self.EventCountry is not None or + self.FirmName is not None or + self.Name is not None or + self.AuthorizedAgent is not None or + self.GeoCertified is not None or + self.EventCode is not None or + self.ActionCode is not None or + self.ReasonCode is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackDetailType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TrackDetailType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'TrackDetailType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TrackDetailType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TrackDetailType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TrackDetailType'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TrackDetailType', fromsubclass_=False, pretty_print=True): + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.EventTime is not None: + namespaceprefix_ = self.EventTime_nsprefix_ + ':' if (UseCapturedNS_ and self.EventTime_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventTime>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventTime), input_name='EventTime')), namespaceprefix_ , eol_)) + if self.EventDate is not None: + namespaceprefix_ = self.EventDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EventDate_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventDate>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventDate), input_name='EventDate')), namespaceprefix_ , eol_)) + if self.Event is not None: + namespaceprefix_ = self.Event_nsprefix_ + ':' if (UseCapturedNS_ and self.Event_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEvent>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Event), input_name='Event')), namespaceprefix_ , eol_)) + if self.EventCity is not None: + namespaceprefix_ = self.EventCity_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCity_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCity), input_name='EventCity')), namespaceprefix_ , eol_)) + if self.EventState is not None: + namespaceprefix_ = self.EventState_nsprefix_ + ':' if (UseCapturedNS_ and self.EventState_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventState), input_name='EventState')), namespaceprefix_ , eol_)) + if self.EventZIPCode is not None: + namespaceprefix_ = self.EventZIPCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventZIPCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventZIPCode>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.EventZIPCode, input_name='EventZIPCode'), namespaceprefix_ , eol_)) + if self.EventCountry is not None: + namespaceprefix_ = self.EventCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCountry_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCountry>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCountry), input_name='EventCountry')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.Name is not None: + namespaceprefix_ = self.Name_nsprefix_ + ':' if (UseCapturedNS_ and self.Name_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')), namespaceprefix_ , eol_)) + if self.AuthorizedAgent is not None: + namespaceprefix_ = self.AuthorizedAgent_nsprefix_ + ':' if (UseCapturedNS_ and self.AuthorizedAgent_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAuthorizedAgent>%s%s' % (namespaceprefix_ , 
self.gds_format_boolean(self.AuthorizedAgent, input_name='AuthorizedAgent'), namespaceprefix_ , eol_)) + if self.GeoCertified is not None: + namespaceprefix_ = self.GeoCertified_nsprefix_ + ':' if (UseCapturedNS_ and self.GeoCertified_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sGeoCertified>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.GeoCertified, input_name='GeoCertified'), namespaceprefix_ , eol_)) + if self.EventCode is not None: + namespaceprefix_ = self.EventCode_nsprefix_ + ':' if (UseCapturedNS_ and self.EventCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sEventCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EventCode), input_name='EventCode')), namespaceprefix_ , eol_)) + if self.ActionCode is not None: + namespaceprefix_ = self.ActionCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ActionCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sActionCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ActionCode), input_name='ActionCode')), namespaceprefix_ , eol_)) + if self.ReasonCode is not None: + namespaceprefix_ = self.ReasonCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ReasonCode_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sReasonCode>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ReasonCode), input_name='ReasonCode')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'EventTime': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventTime') + value_ = self.gds_validate_string(value_, node, 'EventTime') + self.EventTime = value_ + self.EventTime_nsprefix_ = child_.prefix + elif nodeName_ == 'EventDate': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventDate') + value_ = self.gds_validate_string(value_, node, 'EventDate') + self.EventDate = value_ + self.EventDate_nsprefix_ = child_.prefix + elif nodeName_ == 'Event': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Event') + value_ = self.gds_validate_string(value_, node, 'Event') + self.Event = value_ + self.Event_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCity': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCity') + value_ = self.gds_validate_string(value_, node, 'EventCity') + self.EventCity = value_ + self.EventCity_nsprefix_ = child_.prefix + elif nodeName_ == 'EventState': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventState') + value_ = self.gds_validate_string(value_, node, 'EventState') + self.EventState = value_ + self.EventState_nsprefix_ = child_.prefix + elif nodeName_ == 'EventZIPCode' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'EventZIPCode') + ival_ = self.gds_validate_integer(ival_, 
node, 'EventZIPCode') + self.EventZIPCode = ival_ + self.EventZIPCode_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCountry': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCountry') + value_ = self.gds_validate_string(value_, node, 'EventCountry') + self.EventCountry = value_ + self.EventCountry_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'Name': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Name') + value_ = self.gds_validate_string(value_, node, 'Name') + self.Name = value_ + self.Name_nsprefix_ = child_.prefix + elif nodeName_ == 'AuthorizedAgent': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'AuthorizedAgent') + ival_ = self.gds_validate_boolean(ival_, node, 'AuthorizedAgent') + self.AuthorizedAgent = ival_ + self.AuthorizedAgent_nsprefix_ = child_.prefix + elif nodeName_ == 'GeoCertified': + sval_ = child_.text + ival_ = self.gds_parse_boolean(sval_, node, 'GeoCertified') + ival_ = self.gds_validate_boolean(ival_, node, 'GeoCertified') + self.GeoCertified = ival_ + self.GeoCertified_nsprefix_ = child_.prefix + elif nodeName_ == 'EventCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'EventCode') + value_ = self.gds_validate_string(value_, node, 'EventCode') + self.EventCode = value_ + self.EventCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ActionCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ActionCode') + value_ = self.gds_validate_string(value_, node, 'ActionCode') + self.ActionCode = value_ + self.ActionCode_nsprefix_ = child_.prefix + elif nodeName_ == 'ReasonCode': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'ReasonCode') + value_ = self.gds_validate_string(value_, node, 'ReasonCode') + self.ReasonCode = value_ + self.ReasonCode_nsprefix_ = child_.prefix +# end class TrackDetailType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'TrackResponse' + rootClass = TrackResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from track_response import *\n\n') + sys.stdout.write('import track_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". +NamespaceToDefMappings_ = {} + +__all__ = [ + "EMAILType", + "EnabledNotificationRequestsType", + "SMSType", + "TrackDetailType", + "TrackInfoType", + "TrackResponse", + "TrackSummaryType" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_request.py b/modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_request.py new file mode 100644 index 0000000000..5accbc0770 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_request.py @@ -0,0 +1,1542 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:09 2024 by generateDS.py version 2.43.3. 
+# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/zip_code_lookup_request.py') +# +# Command line arguments: +# ./schemas/ZipCodeLookupRequest.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/zip_code_lookup_request.py" ./schemas/ZipCodeLookupRequest.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, 
input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' 
in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = 
'%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 
1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + 
self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'<!\[CDATA\[.*?\]\]>') is not the right home for CDATA handling; see CDATA_pattern_ below. +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions.
+# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', ' ') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ZipCodeLookupRequest(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, USERID=None, PASSWORD=None, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.USERID = _cast(None, USERID) + self.USERID_nsprefix_ = None + self.PASSWORD = _cast(None, PASSWORD) + self.PASSWORD_nsprefix_ = None + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeLookupRequest) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeLookupRequest.subclass: + return ZipCodeLookupRequest.subclass(*args_, **kwargs_) + else: + return ZipCodeLookupRequest(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def get_USERID(self): + return self.USERID + def set_USERID(self, USERID): + self.USERID = USERID + def get_PASSWORD(self): + return self.PASSWORD + def set_PASSWORD(self, PASSWORD): + self.PASSWORD = PASSWORD + def has__content(self): + if ( + self.Address is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupRequest', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeLookupRequest') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeLookupRequest': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeLookupRequest') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeLookupRequest', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, 
eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ZipCodeLookupRequest'): + if self.USERID is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + outfile.write(' USERID=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.USERID), input_name='USERID')), )) + if self.PASSWORD is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + outfile.write(' PASSWORD=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.PASSWORD), input_name='PASSWORD')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupRequest', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Address is not None: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + self.Address.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('USERID', node) + if value is not None and 'USERID' not in already_processed: + already_processed.add('USERID') + self.USERID = value + value = find_attr_value_('PASSWORD', node) + if value is not None and 'PASSWORD' not in already_processed: + already_processed.add('PASSWORD') + self.PASSWORD = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address = obj_ + obj_.original_tagname_ = 'Address' +# end class ZipCodeLookupRequest + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Address1=None, Address2=None, FirmName=None, City=None, State=None, Zip5=None, Zip4=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + 
def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + self.Zip5 = Zip5 + def get_Zip4(self): + return self.Zip4 + def set_Zip4(self, Zip4): + self.Zip4 = Zip4 + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Address1 is not None or + self.Address2 is not None or + self.FirmName is not None or + self.City is not None or + self.State is not None or + self.Zip5 is not None or + self.Zip4 is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Address1 is not None: + namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_)) + if self.Zip4 is not None: + namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip4>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Zip4), input_name='Zip4')), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address1') + value_ = self.gds_validate_string(value_, node, 'Address1') + self.Address1 = value_ + self.Address1_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, 
node, 'Zip5') + ival_ = self.gds_validate_integer(ival_, node, 'Zip5') + self.Zip5 = ival_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Zip4') + value_ = self.gds_validate_string(value_, node, 'Zip4') + self.Zip4 = value_ + self.Zip4_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
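# Note (added for clarity, not generateDS output): the `doc = None` / `rootNode = None` assignments just below drop the last references to the parsed lxml tree so that it can be garbage-collected whenever SaveElementTreeNode is False.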
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupRequest' + rootClass = ZipCodeLookupRequest + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from zip_code_lookup_request import *\n\n') + sys.stdout.write('import zip_code_lookup_request as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
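# Illustrative usage sketch (added note, not part of the generated module): the snippet below
# builds a ZipCodeLookupRequest object tree from an in-memory document using the parseString()
# helper defined above. The credentials and address values are made-up placeholders.
import zip_code_lookup_request as model_

SAMPLE_REQUEST = (
    '<ZipCodeLookupRequest USERID="XXXXXXXX" PASSWORD="XXXXXXXX">'
    '<Address ID="0">'
    '<Address1/>'
    '<Address2>6406 Ivy Lane</Address2>'
    '<City>Greenbelt</City>'
    '<State>MD</State>'
    '</Address>'
    '</ZipCodeLookupRequest>'
)

request = model_.parseString(SAMPLE_REQUEST, silence=True)  # returns a ZipCodeLookupRequest
print(request.get_Address().get_City())                     # -> Greenbelt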
+NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "ZipCodeLookupRequest" +] diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_response.py b/modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_response.py new file mode 100644 index 0000000000..0a785d1b96 --- /dev/null +++ b/modules/connectors/usps_international/karrio/schemas/usps_international/zip_code_lookup_response.py @@ -0,0 +1,1544 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Apr 3 21:09:09 2024 by generateDS.py version 2.43.3. +# Python 3.10.13 (main, Mar 21 2024, 17:51:02) [GCC 9.4.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('-o', './karrio/schemas/usps_international/zip_code_lookup_response.py') +# +# Command line arguments: +# ./schemas/ZipCodeLookupResponse.xsd +# +# Command line: +# /workspaces/karrio/.venv/karrio/bin/generateDS --no-namespace-defs -o "./karrio/schemas/usps_international/zip_code_lookup_response.py" ./schemas/ZipCodeLookupResponse.xsd +# +# Current working directory (os.getcwd()): +# usps_international +# + +import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os +import re as re_ +import base64 +import datetime as datetime_ +import decimal as decimal_ +from lxml import etree as etree_ + + +Validate_simpletypes_ = True +SaveElementTreeNode = True +TagNamePrefix = "" +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. 
See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ModulenotfoundExp_ : + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
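# Added note (not generateDS output): a replacement generatedssuper module must export a
# GeneratedsSuper class providing the same gds_* formatting, parsing and validation helpers
# as the fallback class below, since the generated element classes call them unconditionally.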
+ +try: + from generatedssuper import GeneratedsSuper +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def __str__(self): + settings = { + 'str_pretty_print': True, + 'str_indent_level': 0, + 'str_namespaceprefix': '', + 'str_name': self.__class__.__name__, + 'str_namespacedefs': '', + } + for n in settings: + if hasattr(self, n): + settings[n] = getattr(self, n) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + output = StringIO() + self.export( + output, + settings['str_indent_level'], + pretty_print=settings['str_pretty_print'], + namespaceprefix_=settings['str_namespaceprefix'], + name_=settings['str_name'], + namespacedef_=settings['str_namespacedefs'] + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data).decode('ascii') + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % int(input_data) + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival + def gds_validate_integer(self, input_data, node=None, input_name=''): + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value + def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integer values') + return values + def gds_format_float(self, input_data, input_name=''): + value = ('%.15f' % float(input_data)).rstrip('0') + if value.endswith('.'): + value += '0' + return value + + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ + def gds_validate_float(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value + def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not 
isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') + return values + def gds_format_double(self, input_data, input_name=''): + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ + def gds_validate_double(self, input_data, node=None, input_name=''): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value + def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error( + node, 'Requires sequence of double or float values') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + input_data = input_data.strip() + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval + def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, 
input_name=''): + values = input_data.split() + for value in values: + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
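# Added example (not generateDS output) of the rule described above: with patterns = [[r'[0-9]{5}']],
# a target of '20770' passes because the single inner pattern matches the full string, '2077A' fails
# (no full-length match), and 'x20770y' also fails because the matched text '20770' is shorter than
# the whole target.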
+ found1 = True + target = str(target) + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + # provide default value in case option --disable-xml is used. 
+ content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') + if type(self) != type(other): + return False + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) + def __ne__(self, other): + return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. + def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. +# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write('    ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections."
+ if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + s1 = s1.replace('\n', '&#10;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +def encode_str_2_3(instr): + return instr + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines.
+ if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def 
set_data_type(self, data_type): self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + + +# +# Start enum classes +# +# +# Start data representation classes +# +class ZipCodeLookupResponse(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, Address=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + if Address is None: + self.Address = [] + else: + self.Address = Address + self.Address_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ZipCodeLookupResponse) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ZipCodeLookupResponse.subclass: + return ZipCodeLookupResponse.subclass(*args_, **kwargs_) + else: + return ZipCodeLookupResponse(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address(self): + return self.Address + def set_Address(self, Address): + self.Address = Address + def add_Address(self, value): + self.Address.append(value) + def insert_Address_at(self, index, value): + self.Address.insert(index, value) + def replace_Address_at(self, index, value): + self.Address[index] = value + def has__content(self): + if ( + self.Address + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupResponse', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ZipCodeLookupResponse') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'ZipCodeLookupResponse': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ZipCodeLookupResponse') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ZipCodeLookupResponse', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='ZipCodeLookupResponse'): + pass + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ZipCodeLookupResponse', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Address_ in self.Address: + namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else '' + Address_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + pass + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address': + obj_ = AddressType.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.Address.append(obj_) + obj_.original_tagname_ = 'Address' +# end class ZipCodeLookupResponse + + +class AddressType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ + subclass = None + superclass = None + def __init__(self, ID=None, Address1=None, Address2=None, City=None, FirmName=None, State=None, Urbanization=None, Zip5=None, Zip4=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None + self.ID = _cast(int, ID) + self.ID_nsprefix_ = None + self.Address1 = Address1 + self.Address1_nsprefix_ = None + self.Address2 = Address2 + self.Address2_nsprefix_ = None + self.City = City + self.City_nsprefix_ = None + self.FirmName = FirmName + self.FirmName_nsprefix_ = None + self.State = State + self.State_nsprefix_ = None + self.Urbanization = Urbanization + self.Urbanization_nsprefix_ = None + self.Zip5 = Zip5 + self.Zip5_nsprefix_ = None + self.Zip4 = Zip4 + self.Zip4_nsprefix_ = None + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, AddressType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if AddressType.subclass: + return AddressType.subclass(*args_, **kwargs_) + else: + return AddressType(*args_, **kwargs_) + factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Address1(self): + return self.Address1 + def set_Address1(self, Address1): + self.Address1 = Address1 + def get_Address2(self): + return self.Address2 + def set_Address2(self, Address2): + self.Address2 = Address2 + def get_City(self): + return self.City + def set_City(self, City): + self.City = City + def get_FirmName(self): + return self.FirmName + def set_FirmName(self, FirmName): + self.FirmName = FirmName + def get_State(self): + return self.State + def set_State(self, State): + self.State = State + def get_Urbanization(self): + return self.Urbanization + def set_Urbanization(self, Urbanization): + self.Urbanization = Urbanization + def get_Zip5(self): + return self.Zip5 + def set_Zip5(self, Zip5): + 
self.Zip5 = Zip5 + def get_Zip4(self): + return self.Zip4 + def set_Zip4(self, Zip4): + self.Zip4 = Zip4 + def get_ID(self): + return self.ID + def set_ID(self, ID): + self.ID = ID + def has__content(self): + if ( + self.Address1 is not None or + self.Address2 is not None or + self.City is not None or + self.FirmName is not None or + self.State is not None or + self.Urbanization is not None or + self.Zip5 is not None or + self.Zip4 is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None and name_ == 'AddressType': + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressType') + if self.has__content(): + outfile.write('>%s' % (eol_, )) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressType'): + if self.ID is not None and 'ID' not in already_processed: + already_processed.add('ID') + outfile.write(' ID="%s"' % self.gds_format_integer(self.ID, input_name='ID')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Address1 is not None: + namespaceprefix_ = self.Address1_nsprefix_ + ':' if (UseCapturedNS_ and self.Address1_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress1>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address1), input_name='Address1')), namespaceprefix_ , eol_)) + if self.Address2 is not None: + namespaceprefix_ = self.Address2_nsprefix_ + ':' if (UseCapturedNS_ and self.Address2_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sAddress2>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Address2), input_name='Address2')), namespaceprefix_ , eol_)) + if self.City is not None: + namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sCity>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_)) + if self.FirmName is not None: + namespaceprefix_ = self.FirmName_nsprefix_ + ':' if (UseCapturedNS_ and self.FirmName_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sFirmName>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FirmName), input_name='FirmName')), namespaceprefix_ , eol_)) + if self.State is not None: + namespaceprefix_ = self.State_nsprefix_ + ':' if (UseCapturedNS_ and 
self.State_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sState>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.State), input_name='State')), namespaceprefix_ , eol_)) + if self.Urbanization is not None: + namespaceprefix_ = self.Urbanization_nsprefix_ + ':' if (UseCapturedNS_ and self.Urbanization_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sUrbanization>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Urbanization), input_name='Urbanization')), namespaceprefix_ , eol_)) + if self.Zip5 is not None: + namespaceprefix_ = self.Zip5_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip5_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip5>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip5, input_name='Zip5'), namespaceprefix_ , eol_)) + if self.Zip4 is not None: + namespaceprefix_ = self.Zip4_nsprefix_ + ':' if (UseCapturedNS_ and self.Zip4_nsprefix_) else '' + showIndent(outfile, level, pretty_print) + outfile.write('<%sZip4>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Zip4, input_name='Zip4'), namespaceprefix_ , eol_)) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('ID', node) + if value is not None and 'ID' not in already_processed: + already_processed.add('ID') + self.ID = self.gds_parse_integer(value, node, 'ID') + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + if nodeName_ == 'Address1': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address1') + value_ = self.gds_validate_string(value_, node, 'Address1') + self.Address1 = value_ + self.Address1_nsprefix_ = child_.prefix + elif nodeName_ == 'Address2': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Address2') + value_ = self.gds_validate_string(value_, node, 'Address2') + self.Address2 = value_ + self.Address2_nsprefix_ = child_.prefix + elif nodeName_ == 'City': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'City') + value_ = self.gds_validate_string(value_, node, 'City') + self.City = value_ + self.City_nsprefix_ = child_.prefix + elif nodeName_ == 'FirmName': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'FirmName') + value_ = self.gds_validate_string(value_, node, 'FirmName') + self.FirmName = value_ + self.FirmName_nsprefix_ = child_.prefix + elif nodeName_ == 'State': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'State') + value_ = self.gds_validate_string(value_, node, 'State') + self.State = value_ + self.State_nsprefix_ = child_.prefix + elif nodeName_ == 'Urbanization': + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'Urbanization') + value_ = self.gds_validate_string(value_, node, 'Urbanization') + self.Urbanization = value_ + self.Urbanization_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip5' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 
'Zip5') + ival_ = self.gds_validate_integer(ival_, node, 'Zip5') + self.Zip5 = ival_ + self.Zip5_nsprefix_ = child_.prefix + elif nodeName_ == 'Zip4' and child_.text: + sval_ = child_.text + ival_ = self.gds_parse_integer(sval_, node, 'Zip4') + ival_ = self.gds_validate_integer(ival_, node, 'Zip4') + self.Zip4 = ival_ + self.Zip4_nsprefix_ = child_.prefix +# end class AddressType + + +# +# End data representation classes. +# + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + prefix_tag = TagNamePrefix + tag + rootClass = GDSClassesMapping.get(prefix_tag) + if rootClass is None: + rootClass = globals().get(prefix_tag) + return tag, rootClass + + +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. + ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_=namespacedefs, + pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) + # Enable Python to collect the space used by the DOM. 
+ if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(str(content)) + sys.stdout.write('\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def parseLiteral(inFileName, silence=False, print_warnings=True): + parser = None + doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'ZipCodeLookupResponse' + rootClass = ZipCodeLookupResponse + rootObj = rootClass.factory() + rootObj.build(rootNode, gds_collector_=gds_collector) + # Enable Python to collect the space used by the DOM. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('#from zip_code_lookup_response import *\n\n') + sys.stdout.write('import zip_code_lookup_response as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
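The generated module above exposes generateDS-style entry points (parse, parseString, parseEtree, parseLiteral) that resolve the root tag and fall back to ZipCodeLookupResponse. A minimal usage sketch, assuming the module ships as karrio.schemas.usps_international.zip_code_lookup_response and that the response wraps Address elements modeled by AddressType; the import path and the sample field values are illustrative, not taken from this patch:

    # Minimal sketch (assumed import path; sample XML values are illustrative only).
    import karrio.schemas.usps_international.zip_code_lookup_response as zipcode

    SAMPLE_XML = (
        '<ZipCodeLookupResponse>'
        '<Address ID="0">'
        '<Address2>6406 IVY LANE</Address2>'
        '<City>GREENBELT</City>'
        '<State>MD</State>'
        '<Zip5>20770</Zip5>'
        '<Zip4>1441</Zip4>'
        '</Address>'
        '</ZipCodeLookupResponse>'
    )

    # parseString() builds the object tree; silence=True skips re-exporting it to stdout,
    # while any collected warnings are still written to stderr.
    response = zipcode.parseString(SAMPLE_XML, silence=True)
    print(type(response).__name__)  # expected: ZipCodeLookupResponse
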
+NamespaceToDefMappings_ = {} + +__all__ = [ + "AddressType", + "ZipCodeLookupResponse" +] diff --git a/modules/connectors/usps_international/schemas/AddressValidateRequest.xsd b/modules/connectors/usps_international/schemas/AddressValidateRequest.xsd new file mode 100644 index 0000000000..83a1ff85d6 --- /dev/null +++ b/modules/connectors/usps_international/schemas/AddressValidateRequest.xsd @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/AddressValidateResponse.xsd b/modules/connectors/usps_international/schemas/AddressValidateResponse.xsd new file mode 100644 index 0000000000..cf6a471ecc --- /dev/null +++ b/modules/connectors/usps_international/schemas/AddressValidateResponse.xsd @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupAvailabilityRequest.xsd b/modules/connectors/usps_international/schemas/CarrierPickupAvailabilityRequest.xsd new file mode 100644 index 0000000000..31e9a74351 --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupAvailabilityRequest.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupAvailabilityResponse.xsd b/modules/connectors/usps_international/schemas/CarrierPickupAvailabilityResponse.xsd new file mode 100644 index 0000000000..248c141b92 --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupAvailabilityResponse.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupCancelRequest.xsd b/modules/connectors/usps_international/schemas/CarrierPickupCancelRequest.xsd new file mode 100644 index 0000000000..8b20e36d6d --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupCancelRequest.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupCancelResponse.xsd b/modules/connectors/usps_international/schemas/CarrierPickupCancelResponse.xsd new file mode 100644 index 0000000000..3200cb85ef --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupCancelResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupChangeRequest.xsd b/modules/connectors/usps_international/schemas/CarrierPickupChangeRequest.xsd new file mode 100644 index 0000000000..f744949cb1 --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupChangeRequest.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupChangeResponse.xsd b/modules/connectors/usps_international/schemas/CarrierPickupChangeResponse.xsd new file mode 100644 index 0000000000..e95b729ece --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupChangeResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupInquiryRequest.xsd 
b/modules/connectors/usps_international/schemas/CarrierPickupInquiryRequest.xsd new file mode 100644 index 0000000000..e417cb1421 --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupInquiryRequest.xsd @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupInquiryResponse.xsd b/modules/connectors/usps_international/schemas/CarrierPickupInquiryResponse.xsd new file mode 100644 index 0000000000..ddd5162dcb --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupInquiryResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupScheduleRequest.xsd b/modules/connectors/usps_international/schemas/CarrierPickupScheduleRequest.xsd new file mode 100644 index 0000000000..ecf00cdd76 --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupScheduleRequest.xsd @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CarrierPickupScheduleResponse.xsd b/modules/connectors/usps_international/schemas/CarrierPickupScheduleResponse.xsd new file mode 100644 index 0000000000..bffdac0d99 --- /dev/null +++ b/modules/connectors/usps_international/schemas/CarrierPickupScheduleResponse.xsd @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CityStateLookupRequest.xsd b/modules/connectors/usps_international/schemas/CityStateLookupRequest.xsd new file mode 100644 index 0000000000..2e2a2ae9bc --- /dev/null +++ b/modules/connectors/usps_international/schemas/CityStateLookupRequest.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/CityStateLookupResponse.xsd b/modules/connectors/usps_international/schemas/CityStateLookupResponse.xsd new file mode 100644 index 0000000000..b2937464a0 --- /dev/null +++ b/modules/connectors/usps_international/schemas/CityStateLookupResponse.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/EMRSV4.0BulkRequest.xsd b/modules/connectors/usps_international/schemas/EMRSV4.0BulkRequest.xsd new file mode 100644 index 0000000000..31669dd41e --- /dev/null +++ b/modules/connectors/usps_international/schemas/EMRSV4.0BulkRequest.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/Error.xsd b/modules/connectors/usps_international/schemas/Error.xsd new file mode 100644 index 0000000000..447ae72804 --- /dev/null +++ b/modules/connectors/usps_international/schemas/Error.xsd @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/ExpressMailCommitmentRequest.xsd b/modules/connectors/usps_international/schemas/ExpressMailCommitmentRequest.xsd new file mode 100644 index 0000000000..7a166df396 --- /dev/null +++ b/modules/connectors/usps_international/schemas/ExpressMailCommitmentRequest.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/modules/connectors/usps_international/schemas/ExpressMailCommitmentResponse.xsd b/modules/connectors/usps_international/schemas/ExpressMailCommitmentResponse.xsd new file mode 100644 index 0000000000..24d8ebbda3 --- /dev/null +++ b/modules/connectors/usps_international/schemas/ExpressMailCommitmentResponse.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/FirstClassMailRequest.xsd b/modules/connectors/usps_international/schemas/FirstClassMailRequest.xsd new file mode 100644 index 0000000000..610b32a3a6 --- /dev/null +++ b/modules/connectors/usps_international/schemas/FirstClassMailRequest.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/FirstClassMailResponse.xsd b/modules/connectors/usps_international/schemas/FirstClassMailResponse.xsd new file mode 100644 index 0000000000..79bc0cbc0b --- /dev/null +++ b/modules/connectors/usps_international/schemas/FirstClassMailResponse.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/HFPFacilityInfoRequest.xsd b/modules/connectors/usps_international/schemas/HFPFacilityInfoRequest.xsd new file mode 100644 index 0000000000..70cfec3227 --- /dev/null +++ b/modules/connectors/usps_international/schemas/HFPFacilityInfoRequest.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/HFPFacilityInfoResponse.xsd b/modules/connectors/usps_international/schemas/HFPFacilityInfoResponse.xsd new file mode 100644 index 0000000000..45f171ca60 --- /dev/null +++ b/modules/connectors/usps_international/schemas/HFPFacilityInfoResponse.xsd @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/IntlRateV2Request.xsd b/modules/connectors/usps_international/schemas/IntlRateV2Request.xsd new file mode 100644 index 0000000000..e5414ec1ee --- /dev/null +++ b/modules/connectors/usps_international/schemas/IntlRateV2Request.xsd @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/IntlRateV2Response.xsd b/modules/connectors/usps_international/schemas/IntlRateV2Response.xsd new file mode 100644 index 0000000000..4b43fbc858 --- /dev/null +++ b/modules/connectors/usps_international/schemas/IntlRateV2Response.xsd @@ -0,0 +1,104 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/MRSV4.0Request.xsd b/modules/connectors/usps_international/schemas/MRSV4.0Request.xsd new file mode 100644 index 0000000000..961ee9b08b --- /dev/null +++ b/modules/connectors/usps_international/schemas/MRSV4.0Request.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSEmailRequest.xsd b/modules/connectors/usps_international/schemas/PTSEmailRequest.xsd new 
file mode 100644 index 0000000000..bf47c328c4 --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSEmailRequest.xsd @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSEmailResult.xsd b/modules/connectors/usps_international/schemas/PTSEmailResult.xsd new file mode 100644 index 0000000000..bc7f37333b --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSEmailResult.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSPODRequest.xsd b/modules/connectors/usps_international/schemas/PTSPODRequest.xsd new file mode 100644 index 0000000000..3930a440e1 --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSPODRequest.xsd @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSPODResult.xsd b/modules/connectors/usps_international/schemas/PTSPODResult.xsd new file mode 100644 index 0000000000..cad85f5c0a --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSPODResult.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSRRERequest.xsd b/modules/connectors/usps_international/schemas/PTSRRERequest.xsd new file mode 100644 index 0000000000..902c799900 --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSRRERequest.xsd @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSRREResult.xsd b/modules/connectors/usps_international/schemas/PTSRREResult.xsd new file mode 100644 index 0000000000..8e6a920c77 --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSRREResult.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSTPODResult.xsd b/modules/connectors/usps_international/schemas/PTSTPODResult.xsd new file mode 100644 index 0000000000..a7e1f911e4 --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSTPODResult.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PTSTPodRequest.xsd b/modules/connectors/usps_international/schemas/PTSTPodRequest.xsd new file mode 100644 index 0000000000..6be82855e6 --- /dev/null +++ b/modules/connectors/usps_international/schemas/PTSTPodRequest.xsd @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PriorityMailRequest.xsd b/modules/connectors/usps_international/schemas/PriorityMailRequest.xsd new file mode 100644 index 0000000000..80d0969b04 --- /dev/null +++ b/modules/connectors/usps_international/schemas/PriorityMailRequest.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/PriorityMailResponse.xsd b/modules/connectors/usps_international/schemas/PriorityMailResponse.xsd new file mode 100644 index 0000000000..0efd8d5dee --- /dev/null +++ b/modules/connectors/usps_international/schemas/PriorityMailResponse.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/RateV4Request.xsd 
b/modules/connectors/usps_international/schemas/RateV4Request.xsd new file mode 100644 index 0000000000..fb428e27d3 --- /dev/null +++ b/modules/connectors/usps_international/schemas/RateV4Request.xsd @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/RateV4Response.xsd b/modules/connectors/usps_international/schemas/RateV4Response.xsd new file mode 100644 index 0000000000..2a98ba9448 --- /dev/null +++ b/modules/connectors/usps_international/schemas/RateV4Response.xsd @@ -0,0 +1,85 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/SCANRequest.xsd b/modules/connectors/usps_international/schemas/SCANRequest.xsd new file mode 100644 index 0000000000..33e49c0652 --- /dev/null +++ b/modules/connectors/usps_international/schemas/SCANRequest.xsd @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/SCANResponse.xsd b/modules/connectors/usps_international/schemas/SCANResponse.xsd new file mode 100644 index 0000000000..f20e2ab8f9 --- /dev/null +++ b/modules/connectors/usps_international/schemas/SCANResponse.xsd @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + 65255 skipped + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/SDCGetLocationsRequest.xsd b/modules/connectors/usps_international/schemas/SDCGetLocationsRequest.xsd new file mode 100644 index 0000000000..f14b908755 --- /dev/null +++ b/modules/connectors/usps_international/schemas/SDCGetLocationsRequest.xsd @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/SDCGetLocationsResponse.xsd b/modules/connectors/usps_international/schemas/SDCGetLocationsResponse.xsd new file mode 100644 index 0000000000..2d6234bb0e --- /dev/null +++ b/modules/connectors/usps_international/schemas/SDCGetLocationsResponse.xsd @@ -0,0 +1,124 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/StandardBRequest.xsd b/modules/connectors/usps_international/schemas/StandardBRequest.xsd new file mode 100644 index 0000000000..dc5879b3d5 --- /dev/null +++ b/modules/connectors/usps_international/schemas/StandardBRequest.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/StandardBResponse.xsd b/modules/connectors/usps_international/schemas/StandardBResponse.xsd new file mode 100644 index 0000000000..9671fcfd48 --- /dev/null +++ b/modules/connectors/usps_international/schemas/StandardBResponse.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/TrackFieldRequest.xsd b/modules/connectors/usps_international/schemas/TrackFieldRequest.xsd new 
file mode 100644 index 0000000000..ad0236d0a1 --- /dev/null +++ b/modules/connectors/usps_international/schemas/TrackFieldRequest.xsd @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/TrackRequest.xsd b/modules/connectors/usps_international/schemas/TrackRequest.xsd new file mode 100644 index 0000000000..55350d7e45 --- /dev/null +++ b/modules/connectors/usps_international/schemas/TrackRequest.xsd @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/TrackResponse.xsd b/modules/connectors/usps_international/schemas/TrackResponse.xsd new file mode 100644 index 0000000000..11020f5e11 --- /dev/null +++ b/modules/connectors/usps_international/schemas/TrackResponse.xsd @@ -0,0 +1,132 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/ZipCodeLookupRequest.xsd b/modules/connectors/usps_international/schemas/ZipCodeLookupRequest.xsd new file mode 100644 index 0000000000..59f44652c0 --- /dev/null +++ b/modules/connectors/usps_international/schemas/ZipCodeLookupRequest.xsd @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/ZipCodeLookupResponse.xsd b/modules/connectors/usps_international/schemas/ZipCodeLookupResponse.xsd new file mode 100644 index 0000000000..186c913b54 --- /dev/null +++ b/modules/connectors/usps_international/schemas/ZipCodeLookupResponse.xsd @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSCancelRequest.xsd b/modules/connectors/usps_international/schemas/eVSCancelRequest.xsd new file mode 100644 index 0000000000..ccb9c0c359 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSCancelRequest.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSCancelResponse.xsd b/modules/connectors/usps_international/schemas/eVSCancelResponse.xsd new file mode 100644 index 0000000000..949de222bf --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSCancelResponse.xsd @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSExpressMailIntlRequest.xsd b/modules/connectors/usps_international/schemas/eVSExpressMailIntlRequest.xsd new file mode 100644 index 0000000000..d685372ca2 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSExpressMailIntlRequest.xsd @@ -0,0 +1,131 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSExpressMailIntlResponse.xsd b/modules/connectors/usps_international/schemas/eVSExpressMailIntlResponse.xsd new file mode 100644 index 0000000000..2487d871e5 --- /dev/null +++ 
b/modules/connectors/usps_international/schemas/eVSExpressMailIntlResponse.xsd @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSFirstClassMailIntlRequest.xsd b/modules/connectors/usps_international/schemas/eVSFirstClassMailIntlRequest.xsd new file mode 100644 index 0000000000..37caf8704a --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSFirstClassMailIntlRequest.xsd @@ -0,0 +1,121 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSFirstClassMailIntlResponse.xsd b/modules/connectors/usps_international/schemas/eVSFirstClassMailIntlResponse.xsd new file mode 100644 index 0000000000..3da7c9b9da --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSFirstClassMailIntlResponse.xsd @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSGXGGetLabelRequest.xsd b/modules/connectors/usps_international/schemas/eVSGXGGetLabelRequest.xsd new file mode 100644 index 0000000000..61dc1cc993 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSGXGGetLabelRequest.xsd @@ -0,0 +1,132 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSGXGGetLabelResponse.xsd b/modules/connectors/usps_international/schemas/eVSGXGGetLabelResponse.xsd new file mode 100644 index 0000000000..083f648324 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSGXGGetLabelResponse.xsd @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSICancelRequest.xsd b/modules/connectors/usps_international/schemas/eVSICancelRequest.xsd new file mode 100644 index 0000000000..d1d6039c0e --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSICancelRequest.xsd @@ -0,0 +1,12 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSICancelResponse.xsd b/modules/connectors/usps_international/schemas/eVSICancelResponse.xsd new file mode 100644 index 0000000000..24d139a32f --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSICancelResponse.xsd @@ -0,0 +1,12 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSPriorityMailIntlRequest.xsd b/modules/connectors/usps_international/schemas/eVSPriorityMailIntlRequest.xsd new file mode 100644 index 0000000000..ae807f0a37 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSPriorityMailIntlRequest.xsd @@ -0,0 +1,137 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + diff --git a/modules/connectors/usps_international/schemas/eVSPriorityMailIntlResponse.xsd b/modules/connectors/usps_international/schemas/eVSPriorityMailIntlResponse.xsd new file mode 100644 index 0000000000..faca50a436 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSPriorityMailIntlResponse.xsd @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/schemas/eVSRequest.xsd b/modules/connectors/usps_international/schemas/eVSRequest.xsd new file mode 100644 index 0000000000..2cdd3ff7f9 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSRequest.xsd @@ -0,0 +1,177 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/modules/connectors/usps_international/schemas/eVSResponse.xsd b/modules/connectors/usps_international/schemas/eVSResponse.xsd new file mode 100644 index 0000000000..3420904cf6 --- /dev/null +++ b/modules/connectors/usps_international/schemas/eVSResponse.xsd @@ -0,0 +1,53 @@ + + + + + + + + over 115000 suppressed + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/modules/connectors/usps_international/setup.py b/modules/connectors/usps_international/setup.py index 5fd6386a3f..e746355ec6 100644 --- a/modules/connectors/usps_international/setup.py +++ b/modules/connectors/usps_international/setup.py @@ -1,5 +1,3 @@ -"""Warning: This setup.py is only there for git install until poetry support git subdirectory""" - from setuptools import setup, find_namespace_packages with open("README.md", "r") as fh: @@ -7,8 +5,8 @@ setup( name="karrio.usps_international", - version="2024.6-rc22", - description="Karrio - USPS Shipping Extension", + version="2024.6-rc9", + description="Karrio - USPS International Shipping extension", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/karrioapi/karrio", diff --git a/modules/connectors/usps_international/tests/__init__.py b/modules/connectors/usps_international/tests/__init__.py index 5419b0dcf3..cb8977e635 100644 --- a/modules/connectors/usps_international/tests/__init__.py +++ b/modules/connectors/usps_international/tests/__init__.py @@ -1,5 +1,3 @@ -from tests.usps_international.test_rate import * -from tests.usps_international.test_pickup import * from tests.usps_international.test_tracking import * +from tests.usps_international.test_rate import * from tests.usps_international.test_shipment import * -from tests.usps_international.test_manifest import * diff --git a/modules/connectors/usps_international/tests/usps_international/fixture.py b/modules/connectors/usps_international/tests/usps_international/fixture.py index aee635864d..4d84339bd7 100644 --- a/modules/connectors/usps_international/tests/usps_international/fixture.py +++ b/modules/connectors/usps_international/tests/usps_international/fixture.py @@ -1,33 +1,5 @@ import karrio -import datetime -import karrio.lib as lib - -expiry = datetime.datetime.now() + datetime.timedelta(days=1) -client_id = "client_id" -client_secret = "client_secret" -cached_auth = { - 
f"usps_international|{client_id}|{client_secret}": dict( - token_type="Bearer", - issued_at="1685542319575", - client_id=client_id, - access_token="access_token", - scope="addresses international-prices subscriptions payments pickup tracking labels scan-forms companies service-delivery-standards locations international-labels prices", - expires_in="14399", - refresh_count="0", - status="approved", - expiry=expiry.strftime("%Y-%m-%d %H:%M:%S"), - issuer="api.usps_international.com", - application_name="Silver Shipper Developer", - api_products="[Shipping-Silver]", - public_key="LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4QWxwZjNSNEE1S0lwZnhJVWk1bgpMTFByZjZVZTV3MktzeGxSVzE1UWV0UzBjWGVxaW9OT2hXbDNaaVhEWEdKT3ZuK3RoY0NWVVQ3WC9JZWYvTENZCkhUWk1kYUJOdW55VHEwT2RNZmVkUU8zYUNKZmwvUnJPTHYyaG9TRDR4U1YxRzFuTTc1RTlRYitFZ1p0cmFEUXoKNW42SXRpMUMzOHFGMjU5NVRHUWVUemx3Wk1LQng1VTY2bGwzNzlkZ2plTUJxS3ppVHZHWEpOdVg5ZzRrRlBIaApTLzNERm9FNkVFSW8zUHExeDlXTnRaSm93VkRwQUVZZTQ3SU1UdXJDN2NGcXp2d3M1b1BDRHQ4c083N2lUdDN0Cm1vK3NrM2ExWnZSaGs2WUQ3Zkt1UldQVzFEYUM4dC9pazlnWnhqQndYNlZsSUhDRzRZSHlYejZteWdGV09jMmEKOVFJREFRQUIKLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0t", - ) -} gateway = karrio.gateway["usps_international"].create( - dict( - client_id="client_id", - client_secret="client_secret", - account_number="Your Account Number", - ), - cache=lib.Cache(**cached_auth), + {"username": "username", "password": "password", "mailer_id": "847654321"} ) diff --git a/modules/connectors/usps_international/tests/usps_international/test_rate.py b/modules/connectors/usps_international/tests/usps_international/test_rate.py index 49c6fef7c1..f1e061020a 100644 --- a/modules/connectors/usps_international/tests/usps_international/test_rate.py +++ b/modules/connectors/usps_international/tests/usps_international/test_rate.py @@ -1,169 +1,233 @@ +import re import unittest -from unittest.mock import patch, ANY +from unittest.mock import patch +from karrio.core.utils import DP +from karrio.core.models import RateRequest +from karrio import Rating from .fixture import gateway -from tests import logger - -import karrio -import karrio.lib as lib -import karrio.core.models as models class TestUSPSRating(unittest.TestCase): def setUp(self): self.maxDiff = None - self.RateRequest = models.RateRequest(**RatePayload) + self.RateRequest = RateRequest(**RATE_PAYLOAD) def test_create_rate_request(self): request = gateway.mapper.create_rate_request(self.RateRequest) - logger.debug(request.serialize()) - self.assertEqual(request.serialize(), RateRequest) + serialized_request = re.sub( + " [^>]+", + "", + request.serialize(), + ) + self.assertEqual(serialized_request, RATE_REQUEST_XML) - def test_get_rate(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: - mock.return_value = "{}" - karrio.Rating.fetch(self.RateRequest).from_(gateway) + def test_parse_rate_response(self): + with patch("karrio.mappers.usps_international.proxy.http") as mock: + mock.return_value = RATE_RESPONSE_XML + parsed_response = Rating.fetch(self.RateRequest).from_(gateway).parse() - self.assertEqual( - mock.call_args[1]["url"], - f"{gateway.settings.server_url}/v3/total-rates/search", - ) + self.assertListEqual(DP.to_dict(parsed_response), PARSED_RATE_RESPONSE) - def test_parse_rate_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: - mock.return_value = RateResponse - parsed_response = ( - karrio.Rating.fetch(self.RateRequest).from_(gateway).parse() - ) - 
logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual(lib.to_dict(parsed_response), ParsedRateResponse) + def test_parse_rate_response_errors(self): + with patch("karrio.mappers.usps_international.proxy.http") as mock: + mock.return_value = ERROR_XML + parsed_response = Rating.fetch(self.RateRequest).from_(gateway).parse() + + self.assertListEqual(DP.to_dict(parsed_response), PARSED_ERRORS) if __name__ == "__main__": unittest.main() -RatePayload = { - "shipper": { - "company_name": "ABC Corp.", - "address_line1": "1098 N Fraser Street", - "city": "Georgetown", - "postal_code": "29440", - "country_code": "US", - "person_name": "Tall Tom", - "phone_number": "8005554526", - "state_code": "SC", - }, - "recipient": { - "company_name": "Coffee Five", - "address_line1": "R. da Quitanda, 86 - quiosque 01", - "city": "Centro", - "postal_code": "29440", - "country_code": "BR", - "person_name": "John", - "phone_number": "8005554526", - "state_code": "Rio de Janeiro", - }, +RATE_PAYLOAD = { + "shipper": {"postal_code": "18701"}, + "recipient": {"postal_code": "2046", "country_code": "AU"}, "parcels": [ { - "height": 50, - "length": 50, - "weight": 20, - "width": 12, - "dimension_unit": "CM", - "weight_unit": "KG", + "width": 10, + "height": 10, + "length": 10, + "weight": 3.123, + "weight_unit": "LB", + "dimension_unit": "IN", } ], - "options": { - "usps_label_delivery_service": True, - "usps_price_type": "RETAIL", - "shipment_date": "2024-07-28", - }, - "services": ["usps_parcel_select"], - "reference": "REF-001", + "options": {"usps_insurance_global_express_guaranteed": True}, } -ParsedRateResponse = [ +PARSED_RATE_RESPONSE = [ [ { "carrier_id": "usps_international", "carrier_name": "usps_international", "currency": "USD", "extra_charges": [ - {"amount": 3.35, "currency": "USD", "name": "Base Charge"}, - {"amount": 3.35, "currency": "USD", "name": "string"}, - {"amount": 3.35, "currency": "USD", "name": "Adult Signature Required"}, + {"amount": 115.9, "currency": "USD", "name": "Base charge"} ], - "meta": {"service_name": "usps_parcel_select", "zone": "01"}, - "service": "usps_parcel_select", - "total_charge": 3.35, - } + "meta": {"service_name": "usps_global_express_guaranteed_envelopes"}, + "service": "usps_global_express_guaranteed_envelopes", + "total_charge": 115.9, + }, + { + "carrier_id": "usps_international", + "carrier_name": "usps_international", + "currency": "USD", + "extra_charges": [ + {"amount": 82.45, "currency": "USD", "name": "Base charge"} + ], + "meta": {"service_name": "usps_priority_mail_express_international"}, + "service": "usps_priority_mail_express_international", + "total_charge": 82.45, + }, + { + "carrier_id": "usps_international", + "carrier_name": "usps_international", + "currency": "USD", + "extra_charges": [ + {"amount": 55.35, "currency": "USD", "name": "Base charge"} + ], + "meta": {"service_name": "usps_priority_mail_international"}, + "service": "usps_priority_mail_international", + "total_charge": 55.35, + }, ], [], ] - -RateRequest = [ - { - "accountNumber": "Your Account Number", - "accountType": "EPS", - "destinationCountryCode": "BR", - "extraServices": [415], - "foreignPostalCode": "29440", - "height": 19.69, - "itemValue": 0.0, - "length": 19.69, - "mailClass": "PARCEL_SELECT", - "mailingDate": "2024-07-28", - "originZIPCode": "29440", - "priceType": "RETAIL", - "weight": 44.1, - "width": 4.72, - } +PARSED_ERRORS = [ + [], + [ + { + "carrier_name": "usps_international", + "carrier_id": "usps_international", + "code": "-2147219037", + "message": 
"AcceptanceDateTime cannot be earlier than today's date.", + } + ], ] -RateResponse = """{ - "rateOptions": [ - { - "totalBasePrice": 3.35, - "rates": [ - { - "SKU": "DPXX0XXXXX07200", - "description": "string", - "priceType": "RETAIL", - "price": 3.35, - "weight": 5, - "dimWeight": 5, - "fees": [ - { - "name": "string", - "SKU": "string", - "price": 0 - } - ], - "startDate": "2021-07-16", - "endDate": "2021-07-16", - "mailClass": "PARCEL_SELECT", - "zone": "01" - } - ], - "extraServices": [ - { - "extraService": "922", - "name": "Adult Signature Required", - "SKU": "DPXX0XXXXX07200", - "priceType": "RETAIL", - "price": 3.35, - "warnings": [ - { - "warningCode": "string", - "warningDescription": "string" - } - ] - } - ], - "totalPrice": 3.35 - } - ] -} +ERROR_XML = """ + + + + -2147219037 + ;IntlRateV2.ProcessRequest + AcceptanceDateTime cannot be earlier than today's date. + + + + + +""" + +RATE_REQUEST_XML = """ + 2 + + 0 + 49.97 + false + PACKAGE + + Australia + 10 + 10 + 10 + 18701 + N + N + + 106 + + + 2046 + + +""" + +RATE_REQUEST = {"API": "IntlRateV2", "XML": RATE_REQUEST_XML} + +RATE_RESPONSE_XML = """ + + + Please reference Express Mail for Areas Served. + No Additional Restrictions Data found. + + 3 + 49.97 + false + PACKAGE + 10. + 10. + 10. + CANADA + 115.90 + + + 106 + Insurance + True + 0.00 + True + + + 100.00 + 1 - 3 business days to many major markets + USPS GXG&lt;sup&gt;&#8482;&lt;/sup&gt; Envelopes + USPS-Produced regular size cardboard envelope (12-1/2" x 9-1/2"), the legal-sized cardboard envelope (15" x 9-1/2") and the GXG Tyvek envelope (15-1/2" x 12-1/2") + 70 + + + 3 + 49.97 + false + PACKAGE + 10. + 10. + 10. + CANADA + 82.45 + + + 107 + Insurance + True + 0.00 + True + + + 100.00 + Wed, Jun 09, 2021 Guaranteed + Priority Mail Express International&lt;sup&gt;&#8482;&lt;/sup&gt; + Max. length 59", max. length plus girth 108" + 66 + + + 3 + 49.97 + false + PACKAGE + 10. + 10. + 10. + CANADA + 55.35 + + + 108 + Insurance + True + 0.00 + True + + + 100.00 + 6 - 10 business days to many major markets + Priority Mail International&lt;sup&gt;&#174;&lt;/sup&gt; + Max. length 79", max. 
length plus girth 108" + 66 + + + """ diff --git a/modules/connectors/usps_international/tests/usps_international/test_shipment/__init__.py b/modules/connectors/usps_international/tests/usps_international/test_shipment/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/modules/connectors/usps_international/tests/usps_international/test_shipment/test_first_class.py b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_first_class.py new file mode 100644 index 0000000000..8f1541be1d --- /dev/null +++ b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_first_class.py @@ -0,0 +1,198 @@ +import unittest +from unittest.mock import patch, ANY +import karrio +from karrio.core.utils import DP +from karrio.core.models import ShipmentRequest +from ..fixture import gateway + + +class TestUSPSFirstClassShipment(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.ShipmentRequest = ShipmentRequest(**shipment_data) + + def test_create_shipment_request(self): + requests = gateway.mapper.create_shipment_request(self.ShipmentRequest) + self.assertEqual(requests.serialize(), ShipmentRequestXML) + + # @patch("karrio.mappers.usps_international.proxy.http", return_value="") + # def test_create_shipment(self, http_mock): + # karrio.Shipment.create(self.ShipmentRequest).from_(gateway) + # + # url = http_mock.call_args[1]["url"] + # self.assertEqual( + # url, + # f"{gateway.settings.server_url}?{urllib.parse.urlencode(ShipmentRequestQuery)}", + # ) + + def test_parse_shipment_response(self): + with patch("karrio.mappers.usps_international.proxy.http") as mocks: + mocks.return_value = ShipmentResponseXML + parsed_response = ( + karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() + ) + self.assertListEqual(DP.to_dict(parsed_response), ParsedShipmentResponse) + + +if __name__ == "__main__": + unittest.main() + + +shipment_data = { + "shipper": { + "company_name": "Horizon", + "address_line1": "1309 S Agnew Avenue", + "address_line2": "Apt 303", + "city": "Oklahoma City", + "postal_code": "73108", + "country_code": "US", + "person_name": "Lina Smith", + "phone_number": "1234567890", + "state_code": "OK", + }, + "recipient": { + "company_name": "Coffee Five", + "address_line1": "R. 
da Quitanda, 86 - quiosque 01", + "city": "Centro", + "postal_code": "29440", + "country_code": "BR", + "person_name": "John", + "phone_number": "8005554526", + "state_code": "Rio de Janeiro", + }, + "parcels": [ + { + "height": 9, + "length": 6, + "width": 12, + "weight": 2.0, + "dimension_unit": "CM", + "weight_unit": "KG", + } + ], + "service": "usps_first_class_package_international_service", + "customs": { + "content_type": "merchandise", + "incoterm": "DDU", + "invoice": "INV-040903", + "commodities": [ + { + "weight": 2, + "weight_unit": "KG", + "quantity": 1, + "sku": "XXXXX0000123", + "value_amount": 30, + "value_currency": "USD", + "origin_country": "US", + } + ], + "duty": { + "paid_by": "recipient", + "currency": "USD", + "declared_value": 60, + }, + "certify": True, + "signer": "Admin", + "options": { + "license_number": "LIC-24356879", + "certificate_number": "CERT-97865342", + }, + }, + "options": {"shipment_date": "2021-05-15"}, +} + + +ParsedShipmentResponse = [ + { + "carrier_id": "usps_international", + "carrier_name": "usps_international", + "shipment_identifier": "LZ333007778US", + "tracking_number": "LZ333007778US", + "docs": {"label": ANY}, + "meta": { + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=LZ333007778US" + }, + }, + [], +] + + +ShipmentRequestXML = """ + 2 + + 6X4LABEL + + Admin + Lina Smith + Horizon + Apt 303 + 1309 S Agnew Avenue + Oklahoma City + 73108 + + 1234567890 + John + John + Coffee Five + + 01 R. da Quitanda, 86 - quiosque + Centro + Rio de Janeiro + Brazil + 29440 + 8005554526 + + + N/A + 1 + 30 + 4.41 + 70.55 + XXXXX0000123 + United States + + + 4.41 + 70.55 + MERCHANDISE + N/A + N + LIC-24356879 + CERT-97865342 + INV-040903 + PDF + ALLINONEFILE + 05/15/2021 + 2.36 + 4.72 + 3.54 + false + I + 847654321 + +""" + +ShipmentRequestQuery = {"API": "eVSFirstClassMailIntl", "XML": ShipmentRequestXML} + +ShipmentResponseXML = """ + 30.42 + 1.11 + LZ333007778US + SUkqAAgAAAASAP4ABAAB... + + + Antiquities, art fossils, historical documents, numismatic material, specimens of flora and fauna, and similar cultural heritage objects that are significant to a nation's identity. Coins; bank notes; currency notes (paper money); securities of any kind payable to bearer; traveler's checks; platinum, gold, and silver (except for jewelry items meeting the requirement in "Restrictions" below); precious stones (except when contained in jewelry items meeting the requirement in "Restrictions" below); and other valuable articles are prohibited. Dog collars with protrusions designed to puncture or bruise an animal's skin. Fruit cartons (used or new). Fur, including raw, tanned, or processed furs or pelts, and goods that may contain such fur that are derived from domesticated cat and dog breeds. Goods bearing the name "Anzac." Goods produced wholly or partly in prisons or by convict labor. Laser pointers and similar handheld devices designed or adapted to emit a laser beam with an accessible emission level greater than 1 megawatt (MW). Most food, plant, and animal products, including the use of products such as straw and other plant material as packing materials. Perishable infectious biological substances. Radioactive materials. Registered philatelic articles with fictitious addresses. Replica firearms, including any article that has the appearance of a firearm that could reasonably be mistaken as a firearm. Seditious literature. Signal jammers capable of preventing or disrupting mobile telephone and satellite navigation services. 
Silencers for firearms. Tobacco products, including cigarettes and loose-leaf tobacco. Exception: Cigars. Note: Although Australia also permits chewing tobacco and oral snuff in amounts up to 3.3 pounds (1.5 kg), the U.S. Postal Service does not permit these types of smokeless tobacco in international mail - see 136.4. Used bedding. + Airsoft (BB) guns that do not have the appearance of fully automatic firearms require prior approval granted by relevant police representatives. The addressee must submit an application to import the item via the police certification test. Drugs, medicines, and therapeutic substances such as antibiotics, growth hormones, kava, psychoactive substances, and steroids require an import permit from the Australian Department of Health, Office of Drug Control. Fish or parts of fish, including all species of bony fish, sharks, rays, crustaceans, mollusks, and other marine organisms (but not including marine mammals, marine reptiles, or toothfish), whether fresh, frozen, smoked, or preserved in airtight containers, require an import permit from the Australian Fisheries Management Authority (AFMA). Jewelry is permitted only when sent as an insured parcel using Priority Mail International service. In addition, Australian Customs regulations prohibit importation of jewelry that is made with ivory or from endangered species, such as snake, elephant, or crocodile, that does not have an accompanying Import/Export Permit in relation to the Convention on International Trade in Endangered Species of Wild Fauna and Flora (CITES). Knives (such as daggers and throwing knives), throwing blades, or throwing axes require written Police Certification (B709B form or B709X form) from the Australian Police Firearms Registry. Meat and other animal products; powdered or concentrated milk; and other dairy products requires permission to import from the Australian quarantine authorities. Permission of the Australian Director-General of Health is required to import medicines. + Duty may be levied on catalogs, price lists, circulars, and all advertising introduced into Australia through the mail, regardless of the class of mail used. + Country Code: AU Reciprocal Service Name: Express Post Required Customs Form/Endorsement 1. Business and commercial papers. PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). Endorse item clearly next to mailing label as BUSINESS PAPERS. 2. Merchandise samples without commercial value microfilm, microfiche, and computer data. PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). 3. Merchandise and all articles subject to customs duty. PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). Note: 1. Coins; banknotes; currency notes, including paper money; securities of any kind payable to bearer; traveler's checks; platinum, gold, and silver; precious stones; jewelry; watches; and other valuable articles are prohibited in Priority Mail Express International shipments to Australia. 2. Priority Mail Express International With Guarantee service - which offers a date-certain, postage-refund guarantee - is available to Australia. Areas Served: All except Lord Howe Island and the Australian Antarctic territories. + No Additional Restrictions Data found. 
+ + + 109 + Electronic USPS Delivery Confirmation International (E-USPS DELCON INTL) + 0.00 + + + 9810 + +""" diff --git a/modules/connectors/usps_international/tests/usps_international/test_shipment/test_global_express_guaranteed.py b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_global_express_guaranteed.py new file mode 100644 index 0000000000..8f147a7fad --- /dev/null +++ b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_global_express_guaranteed.py @@ -0,0 +1,186 @@ +import unittest +from unittest.mock import patch, ANY +import karrio +from karrio.core.utils import DP +from karrio.core.models import ShipmentRequest +from ..fixture import gateway + + +class TestUSPSGXGShipment(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.ShipmentRequest = ShipmentRequest(**shipment_data) + + def test_create_shipment_request(self): + requests = gateway.mapper.create_shipment_request(self.ShipmentRequest) + self.assertEqual(requests.serialize(), ShipmentRequestXML) + + def test_parse_shipment_response(self): + with patch("karrio.mappers.usps_international.proxy.http") as mocks: + mocks.return_value = ShipmentResponseXML + parsed_response = ( + karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() + ) + self.assertListEqual(DP.to_dict(parsed_response), ParsedShipmentResponse) + + +if __name__ == "__main__": + unittest.main() + + +shipment_data = { + "shipper": { + "company_name": "Horizon", + "address_line1": "1309 S Agnew Avenue", + "address_line2": "Apt 303", + "city": "Oklahoma City", + "postal_code": "73108", + "country_code": "US", + "person_name": "Lina Smith", + "phone_number": "1234567890", + "state_code": "OK", + }, + "recipient": { + "company_name": "Coffee Five", + "address_line1": "R. da Quitanda, 86 - quiosque 01", + "city": "Centro", + "postal_code": "29440", + "country_code": "BR", + "person_name": "John", + "phone_number": "8005554526", + "state_code": "Rio de Janeiro", + }, + "parcels": [ + { + "height": 9, + "length": 6, + "width": 12, + "weight": 2.0, + "dimension_unit": "CM", + "weight_unit": "KG", + } + ], + "service": "usps_global_express_guaranteed_non_document_non_rectangular", + "customs": { + "content_type": "merchandise", + "incoterm": "DDU", + "invoice": "INV-040903", + "commodities": [ + { + "weight": 2, + "weight_unit": "KG", + "quantity": 1, + "sku": "XXXXX0000123", + "value_amount": 30, + "value_currency": "USD", + "origin_country": "US", + } + ], + "duty": { + "paid_by": "recipient", + "currency": "USD", + "declared_value": 60, + }, + "certify": True, + "signer": "Admin", + "options": { + "license_number": "LIC-24356879", + "certificate_number": "CERT-97865342", + }, + }, + "options": {"shipment_date": "2021-05-15", "insurance": 75.0}, +} + + +ParsedShipmentResponse = [ + { + "carrier_id": "usps_international", + "carrier_name": "usps_international", + "shipment_identifier": "8300100690", + "tracking_number": "8300100690", + "docs": {"label": ANY}, + "meta": { + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=8300100690" + }, + }, + [], +] + + +ShipmentRequestXML = """ + 2 + Admin + Lina Smith + Horizon + 1309 S Agnew Avenue + Apt 303 + Oklahoma City + Oklahoma + 73108 + 1234567890 + John + Coffee Five + 01 R. 
da Quitanda, 86 - quiosque + + 29440 + 8005554526 + 000 + Rio de Janeiro + PACKAGE + NON-DOC + + + N/A + N/A + 1 + 30 + 4.41 + 70.55 + XXXXX0000123 + United States + + + MERCHANDISE + N + 75.0 + 4.41 + 70.55 + 2.36 + 4.72 + 3.54 + INV-040903 + DDU + Brazil + PDF + 05/15/2021 + false + I + 847654321 + +""" + +ShipmentRequestQuery = {"API": "eVSGXGGetLabel", "XML": ShipmentRequestXML} + +ShipmentResponseXML = """ + 5.00 + + W + 10/22/2020 + + + 8300100690 + 898300100697 + SUkqAAgAAAASAP4ABAAB... + + + + + + + + 0.00 + 4 + + 989 + +""" diff --git a/modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_express.py b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_express.py new file mode 100644 index 0000000000..155d33945d --- /dev/null +++ b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_express.py @@ -0,0 +1,203 @@ +import unittest +import urllib.parse +from unittest.mock import patch, ANY +import karrio +import karrio.lib as lib +import karrio.core.models as models +from ..fixture import gateway + + +class TestUSPSPriorityExpressShipment(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.ShipmentRequest = models.ShipmentRequest(**shipment_data) + + def test_create_shipment_request(self): + requests = gateway.mapper.create_shipment_request(self.ShipmentRequest) + self.assertEqual(requests.serialize(), ShipmentRequestXML) + + @patch("karrio.mappers.usps_international.proxy.http", return_value="") + def test_create_shipment(self, http_mock): + karrio.Shipment.create(self.ShipmentRequest).from_(gateway) + + url = http_mock.call_args[1]["url"] + expected_url = f"{gateway.settings.server_url}?{urllib.parse.urlencode(ShipmentRequestQuery)}" + self.assertEqual( + urllib.parse.unquote(url), + urllib.parse.unquote(expected_url), + ) + + def test_parse_shipment_response(self): + with patch("karrio.mappers.usps_international.proxy.http") as mocks: + mocks.return_value = ShipmentResponseXML + parsed_response = ( + karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() + ) + self.assertListEqual(lib.to_dict(parsed_response), ParsedShipmentResponse) + + +if __name__ == "__main__": + unittest.main() + + +shipment_data = { + "shipper": { + "company_name": "Horizon", + "address_line1": "1309 S Agnew Avenue", + "address_line2": "Apt 303", + "city": "Oklahoma City", + "postal_code": "73108", + "country_code": "US", + "person_name": "Lina Smith", + "phone_number": "1234567890", + "state_code": "OK", + }, + "recipient": { + "company_name": "Coffee Five", + "address_line1": "R. 
da Quitanda, 86 - quiosque 01", + "city": "Centro", + "postal_code": "29440", + "country_code": "BR", + "person_name": "John", + "phone_number": "8005554526", + "state_code": "Rio de Janeiro", + }, + "parcels": [ + { + "height": 9, + "length": 6, + "width": 12, + "weight": 2.0, + "dimension_unit": "CM", + "weight_unit": "KG", + "options": {"usps_insurance_express_mail_international": 50.0}, + } + ], + "service": "usps_priority_mail_express_international_legal_flat_rate_envelope", + "customs": { + "content_type": "merchandise", + "incoterm": "DDU", + "invoice": "INV-040903", + "commodities": [ + { + "weight": 2, + "weight_unit": "KG", + "quantity": 1, + "sku": "XXXXX0000123", + "value_amount": 30, + "value_currency": "USD", + "origin_country": "US", + } + ], + "duty": { + "paid_by": "recipient", + "currency": "USD", + "declared_value": 60, + }, + "certify": True, + "signer": "Admin", + "options": { + "license_number": "LIC-24356879", + "certificate_number": "CERT-97865342", + }, + }, + "options": {"shipment_date": "2021-05-15"}, +} + + +ParsedShipmentResponse = [ + { + "carrier_id": "usps_international", + "carrier_name": "usps_international", + "shipment_identifier": "EB321424860US", + "tracking_number": "EB321424860US", + "docs": {"label": ANY}, + "meta": { + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=EB321424860US" + }, + }, + [], +] + + +ShipmentRequestXML = """ + 2 + + 6X4LABEL + + Admin + Lina Smith + Horizon + Apt 303 + 1309 S Agnew Avenue + Oklahoma City + 73108 + + 1234567890 + John + John + Coffee Five + + R. da Quitanda, 86 - quiosque 01 + Centro + Rio de Janeiro + Brazil + 29440 + 8005554526 + RETURN + + + + N/A + 1 + 30.0 + 4.41 + 70.549999999999997 + XXXXX0000123 + United States + + + 50 + 4.41 + 70.549999999999997 + MERCHANDISE + N/A + Y + LIC-24356879 + CERT-97865342 + INV-040903 + PDF + ALLINONEFILE + 05/15/2021 + 2.36 + 4.72 + 3.54 + false + I + 847654321 + +""" + +ShipmentRequestQuery = {"API": "eVSExpressMailIntl", "XML": ShipmentRequestXML} + +ShipmentResponseXML = """ + 67.84 + 15.00 + 10.83 + EB321424860US + SUkqAAgAAAASAP4ABAAB... + .....removed..... + .....removed..... + .....removed..... + .....removed..... + + Banknotes; currency notes; paper money; securities payable to bearer; and traveler's checks. Coins; manufactured and unmanufactured platinum, gold, and silver; precious stones; jewels; expensive jewelry; and other valuable articles. Commercial samples that promote tobacco products or smoking-related merchandise. Commercial shipments that contain cigarettes, cigarillos, cigars, loose and packaged tobacco, pipes, and other smoking devices. Items that are fragile, either by nature or due to inadequate packing, that could cause harm to individuals or equipment. Medicines whose formulas are not listed in the official pharmacopeias or not licensed by the Brazilian Department of Public Health. Perishable infectious biological substances. Perishable noninfectious biological substances. Playing cards. Poniards, stilettos, poniard blades; canes, umbrellas, or any other articles containing swords, daggers, or guns; handcuffs, and blackjacks. Primary educational books not written in Portuguese. Radioactive materials. Regulation arms and munitions of Brazil and parts. Air guns. Reducing tubes and silencers for firearms. Salted or smoked meat, and other foodstuffs of animal origin. Seeds and seedlings of coffee, shrubs. Used consumer goods (See Observation #5 for exception). 
+ Medicines must be accompanied by a prescription from the attendant Brazilian doctor. This prescription should be on a chemist's form, bearing the name, private address or office of the doctor, his registration number with the Brazil National Medical Council and a Portuguese translation of the instructions, as necessary. Postal packages containing medicaments and not satisfying the above-mentioned conditions will be returned to the senders or, if abandoned, treated as undeliverable items. Postage stamps are admitted only in registered First-Class Package International Service with Registered Mail service shipments. Saccharine and other artificial sweeteners for artificial beverages require permission from the Brazilian Department of Public Health for importation. + 1. Empresa Brasileira de Correios e Telégrafos (ECT) is introducing a "Fee for Postal Dispatch" with a current value of 15 Brazilian reals (BRL) for items presented to customs. If the addressee has not properly paid this fee, ECT will return the item to the sender. 2. Import licenses are required for many kinds of goods. ECT recommends that the sender ascertain from the addressee before mailing that the addressee holds the necessary documents. A shipment that does not have a required import permit is subject to confiscation as contraband. 3. The mailer must affix all necessary or relevant documents including invoices, export/import licenses, certificates of origin, health certificates, etc., to the outside of the item. 4. Imports are allowed by mail, including mail order catalog shipments, up to a value of U.S. $500 (U.S. $1,000 for computer software) without the requirement of an import license provided the item is not for resale. Shipments valued at no more than U.S. $50 are duty-free and are delivered to the addressee; shipments above U.S. $50 can be picked up at the post office upon payment of import duties. Imports that are prohibited or subject to special regulations must comply with applicable Brazilian government provisions. Identical shipments from the same source to the same person or address in Brazil within a 90-day period are considered part of the same shipment and may be subject to confiscation. Other merchandise that usually enters duty-free include items such as newspapers, maps, books, and magazines. 5. The mailer must fully and accurately complete the customs declarations, including the landline or mobile telephone number of the addressee, if available, and detailed information concerning the contents and value of the item, such as branded product description, model, serial number, and value of each individual article within the item. ECT immediately returns to the sender an item that does not have a properly completed customs declaration. 6. The importer tax identification (ID) number is required for all items containing goods. In Brazil, the importer tax ID number is known as "CPF" (format: 000.000.000-00) for natural persons and as "CNPJ" (format: 00.000.000/0000-00) for legal persons. This information must be provided either by the mailer in the importer reference field of the customs declaration form or on the commercial invoice, or by the importer through the Correios website at www2.correios.com.br/sistemas/rastreamento. 7. Shipments that do not indicate the applicable postage and fees on PS Form 2976-A will hinder the customs clearance process, causing delays to clear the items. 8. 
Used consumer goods may only be sent to charitable organizations that are recognized by the Brazilian government as being entities which serve the public interest. + Country Code: BR Reciprocal Service Name; Serca Required Customs Form/Endorsement 1. Correspondence and business papers. PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). Endorse item clearly next to mailing label as BUSINESS PAPERS. 2. Merchandise, merchandise samples without commercial value, documents, computer data, and all articles subject to customs duty. PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). Include an invoice with all commercial shipments. Note: Coins; banknotes; currency notes, including paper money; securities of any kind payable to bearer; traveler's checks; platinum, gold, and silver; precious stones; jewelry; watches; and other valuable articles are prohibited in Priority Mail Express International shipments to Brazil. Areas Served: All + No Additional Restrictions Data found. + 0 + 3-5 business days to many major markets + 9773 + +""" diff --git a/modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_mail.py b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_mail.py new file mode 100644 index 0000000000..c787636745 --- /dev/null +++ b/modules/connectors/usps_international/tests/usps_international/test_shipment/test_priority_mail.py @@ -0,0 +1,262 @@ +import unittest +import urllib.parse +from unittest.mock import patch, ANY +import karrio +from karrio.core.utils import DP +from karrio.core.models import ShipmentRequest, ShipmentCancelRequest +from ..fixture import gateway + + +class TestUSPSPriorityMailShipment(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.ShipmentRequest = ShipmentRequest(**shipment_data) + self.ShipmentCancelRequest = ShipmentCancelRequest(**shipment_cancel_data) + + def test_create_shipment_request(self): + requests = gateway.mapper.create_shipment_request(self.ShipmentRequest) + self.assertEqual(requests.serialize(), ShipmentRequestXML) + + def test_create_cancel_shipment_request(self): + requests = gateway.mapper.create_cancel_shipment_request( + self.ShipmentCancelRequest + ) + self.assertEqual(requests.serialize(), ShipmentCancelRequestXML) + + @patch("karrio.mappers.usps_international.proxy.http", return_value="") + def test_create_shipment(self, http_mock): + karrio.Shipment.create(self.ShipmentRequest).from_(gateway) + + url = http_mock.call_args[1]["url"] + expected_url = f"{gateway.settings.server_url}?{urllib.parse.urlencode(ShipmentRequestQuery)}" + self.assertEqual( + urllib.parse.unquote(url), + urllib.parse.unquote(expected_url), + ) + + @patch("karrio.mappers.usps_international.proxy.http", return_value="") + def test_cancel_shipment(self, http_mock): + karrio.Shipment.cancel(self.ShipmentCancelRequest).from_(gateway) + + url = http_mock.call_args[1]["url"] + self.assertEqual( + url, + f"{gateway.settings.server_url}?{urllib.parse.urlencode(ShipmentCancelRequestQuery)}", + ) + + def test_parse_shipment_response(self): + with patch("karrio.mappers.usps_international.proxy.http") as mocks: + mocks.return_value = ShipmentResponseXML + parsed_response = ( + karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() + ) + + self.assertListEqual(DP.to_dict(parsed_response), ParsedShipmentResponse) + + def test_parse_cancel_shipment_response(self): + with patch("karrio.mappers.usps_international.proxy.http") as mocks: + mocks.return_value 
= ShipmentCancelResponseXML + parsed_response = ( + karrio.Shipment.cancel(self.ShipmentCancelRequest) + .from_(gateway) + .parse() + ) + + self.assertEqual( + DP.to_dict(parsed_response), DP.to_dict(ParsedShipmentCancelResponse) + ) + + +if __name__ == "__main__": + unittest.main() + + +shipment_cancel_data = { + "shipment_identifier": "123456789012", +} + +shipment_data = { + "shipper": { + "company_name": "Horizon", + "address_line1": "1309 S Agnew Avenue", + "address_line2": "Apt 303", + "city": "Oklahoma City", + "postal_code": "73108", + "country_code": "US", + "person_name": "Lina Smith", + "phone_number": "1234567890", + "state_code": "OK", + }, + "recipient": { + "company_name": "Coffee Five", + "address_line1": "R. da Quitanda, 86 - quiosque 01", + "city": "Centro", + "postal_code": "29440", + "country_code": "BR", + "person_name": "John", + "phone_number": "8005554526", + "state_code": "Rio de Janeiro", + }, + "parcels": [ + { + "height": 9, + "length": 6, + "width": 12, + "weight": 2.0, + "dimension_unit": "CM", + "weight_unit": "KG", + "options": {"insurance": 90.0}, + } + ], + "service": "usps_priority_mail_international_large_flat_rate_box", + "customs": { + "content_type": "merchandise", + "incoterm": "DDU", + "invoice": "INV-040903", + "commodities": [ + { + "weight": 2, + "weight_unit": "KG", + "quantity": 1, + "sku": "XXXXX0000123", + "value_amount": 30, + "value_currency": "USD", + "origin_country": "US", + } + ], + "duty": { + "paid_by": "recipient", + "currency": "USD", + "declared_value": 60, + }, + "certify": True, + "signer": "Admin", + "options": { + "license_number": "LIC-24356879", + "certificate_number": "CERT-97865342", + }, + }, + "options": {"shipment_date": "2021-05-15"}, +} + + +ParsedShipmentResponse = [ + { + "carrier_id": "usps_international", + "carrier_name": "usps_international", + "shipment_identifier": "HE200448219US", + "tracking_number": "HE200448219US", + "docs": {"label": ANY}, + "meta": { + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=HE200448219US" + }, + }, + [], +] + +ParsedShipmentCancelResponse = [ + { + "carrier_id": "usps_international", + "carrier_name": "usps_international", + "operation": "Shipment Cancel", + "success": True, + }, + [], +] + +ShipmentRequestXML = """ + 2 + + 6X4LABEL + + Admin + Lina Smith + Horizon + Apt 303 + 1309 S Agnew Avenue + Oklahoma City + Oklahoma + 73108 + + 1234567890 + John + Coffee Five + + 01 R. da Quitanda, 86 - quiosque + Centro + Rio de Janeiro + Brazil + 29440 + 8005554526 + RETURN + + + + N/A + 1 + 30.0 + 4.41 + 70.549999999999997 + XXXXX0000123 + United States + + + y + 90.0 + 4 + 70 + MERCHANDISE + N/A + N + LIC-24356879 + CERT-97865342 + INV-040903 + PDF + ALLINONEFILE + 05/15/2021 + 4.72 + 2.36 + 3.54 + false + I + 847654321 + +""" + +ShipmentRequestQuery = {"API": "eVSPriorityMailIntl", "XML": ShipmentRequestXML} + +ShipmentCancelRequestXML = """ + 123456789012 + +""" + +ShipmentCancelRequestQuery = {"API": "eVSICancel", "XML": ShipmentCancelRequestXML} + +ShipmentResponseXML = """ + 38.52 + 15.00 + 10.83 + HE200448219US + SUkqAAgAAAASAP4ABAAB... + + + + + + An issue of a publication in which more than 5 percent of its total advertising space is primarily directed to a Canadian market and which indicates: (a) Specifically where goods or services may be obtained in Canada, or (b) Specific items or conditions relating to the sale or provision of goods or services in Canada. All alcoholic beverages including wines, etc. 
An issue of a publication that contains an advertisement primarily directed to a Canadian market is a prohibited import if that advertisement does not appear in identical form in all editions of the issue distributed in the country of origin. Articles so marked as to create the false impression that they were made in Canada, Great Britain or any other British country. Commercial tags of metal. Firearms, except as follows: Firearms may be mailed to Canada if the importer has the required documentation and if the firearms meet the requirements in Publication 52, subchapter 43 and part 632. Before mailing, customers must visit cbsa-asfc.gc.ca/import/iefw-iefa-eng.html to review Canadian import requirements. Gold bullion, gold dust, and nonmanufactured precious metals. Non-refillable lighters or any other lighter that contains fuel. New lighters with no fuel may be sent. Oleomargarine and other butter substitutes, including altered or renovated butter. Perishable infectious biological substances. Perishable noninfectious biological substances. Plumage and skins of wild birds. Prison-made goods being sold or intended for sale by a person or firm. Radioactive materials. Replica or inert munitions, as well as other devices that simulate explosive devices or munitions, including replica or inert grenades or other simulated military munitions, whether or not such items are for display purposes. Reprints of Canadian or British works copyrighted in Canada. Reproductions of Canadian postage stamps unless printed in publications in black and white only and with a defacing line drawn across each reproduction. Shipments bearing caution labels indicating the contents are flammable. Smoke-making devices for motor vehicles and boats. Used or secondhand hives or bee supplies. Vaping liquids containing 66 mg/g or more nicotine by weight. Note: The U.S. Postal Service measures the concentration of nicotine solutions by volume rather than by weight, so it is possible that a product that does not exceed Canada's weight restriction would exceed the Postal Service's volume restriction. Nicotine solutions with a concentration of 16.67 percent (166 mg/ml) or more, when nicotine is the only toxic material in the liquid, are classified as "dangerous goods" (see 136.1) and as such are prohibited in international mail. (See also Observation 1 + Coins; banknotes; currency notes; securities payable to bearer; traveler's checks; gold, silver, platinum, manufactured or not; jewelry; and other valuable articles may be sent only in registered items First-Class Package International Service with Registered Mail service. Exceptions: Coins sent to or from collectors or dealers may be mailed in ordinary (uninsured) parcels. Drugs and medicines must comply with Canadian law. Eggs for hatching must be packed in new, clean containers and accompanied by a certificate issued by a veterinarian of the U.S. Department of Agriculture, or one issued by a State veterinarian and endorsed by a veterinarian of that Bureau, stating that to the best of his or her knowledge the eggs come from a flock that is free from Newcastle disease, fowl pest, or fowl typhoid. See 135.3 for method of packing. Meat and meat food products must be accompanied by an export certificate issued by the U.S. Department of Agriculture and labeled in accordance with Canadian regulations. Exception to these requirements are: 1. bona fide sample shipments weighing less than 10 kg; 2. meat products addressed to a government department or agency; 3. 
meat products intended for personal consumption when they enter Canada in the possession of the importer. Pet food that contains biologically appropriate raw food or bones and raw food - such as heat-processed, shelf-stable pet foods, treats, and compound chews - must be accompanied by an import permit from the Canadian Food Inspection Agency and a zoo sanitary certificate from the United States Department of Agriculture (USDA) Animal and Plant Health Inspection Service (APHIS) Veterinarian Services. Attach a copy of both documents to the outside of the mailpiece for review by the Canada Border Service Agency. Precious stones, set or not set; all items used as dress ornaments and coming under the term "jewelry" including articles of gold or other precious metal for personal use such as cigarette holders, cases, powder cases, card cases, opera glasses, fountain pens, watches, etc., are permitted in insured parcels provided the articles have value not over $5 U.S. A parcel containing a number of such articles valued at $5 or less may be insured for the total value of the contents up to a maximum of $200. Veterinary biological products including serums and vaccines must be accompanied by a permit issued by the Veterinary Director General, Ministry of Agriculture of Canada. + 1. As noted in the Prohibitions section, Canada prohibits certain vaping products. However, vaping products, otherwise known as electronic smoking products (i.e., electronic products for the vaporization and administration of inhaled doses of nicotine including electronic cigarettes, cigars, cigarillos, and pipes, as well as cartridges of nicotine solutions and related products), that make health claims are subject to the Canadian Food and Drugs Act (FDA). "Health claims" refers to any statement that represents the product as a drug or device under section 2 of the FDA - for example, a statement that the product will help someone quit smoking. Vaping products that make health claims require authorization under the FDA before being commercially imported, advertised, or sold in Canada. A vaping product that makes health claims is considered a prescription drug, and before importation to Canada, it requires a Drug Establishment License and an assigned corresponding Drug Identification Number (DIN). For more information, visit canada.ca/en/health-canada/topics/licensing-authorizing-manufacturing-drug-health-products.html. Vaping products with no health claims and no drugs other than nicotine are not subject to the FDA. 2. Banknotes valued at $100 or more must be put up in a compact package and securely tied with strong twine before wrapping. The wrapper must be linen or other strong, woven material, linen lined paper, or two thicknesses of strong kraft paper. After wrapping, the package must be again securely tied or stitched and sealed at the points of closing. 3. The name of the Canadian province in which the office of destination is located must appear as part of the address. 4. The following must not be accepted for insurance: Bees, postage stamps (canceled and uncanceled) and albums in which they are mounted, and parcels addressed to CFPOs. 5. Canadian Forces Mail (CFPO) is processed through Canadian military post offices and must be addressed in the following manner: (a) NUMBER, RANK, NAME UNIT (b) CFPO (NUMBER) (c) BELLEVILLE ON K0K 3R0 (d) CANADA Maximum weight limits for mail addressed to members of the Canadian Forces based outside of Canada (CFPO) is 22 pounds. Parcels for CFPO addresses may not be insured. 
Direct sacks of printed matter (M-bags) are not permitted for CFPO addresses. 6. A letter fully prepaid and bearing the same address as that of a parcel may be tied or otherwise securely attached to the outside of the parcel. Stamps to cover postage on the parcel must be affixed to the wrapper of the parcel. Stamps to pay postage on the letter must be affixed to the envelope thereof. 7. Certain types of merchandise must be marked to show country of origin in the manner prescribed by the Canadian customs regulations. 8. Goods valued under 20 Canadian dollars are duty and excise tax exempt. Goods over 20 Canadian dollars will be subject to the applicable duties and excise taxes. Gift shipments received by mail that are valued under 60 Canadian dollars are duty and excise tax exempt. 9. For all casual and commercial goods valued at or under 1,600 Canadian dollars, Canada Post will collect assessed duties, excise taxes, and a handling fee from the addressee. This handling fee is set by Canada Post (see http://www.canadapost.ca/tools/pg/manual/PGcustoms-e.asp). All commercial mail items over 1,600 Canadian dollars will be held by Canada Customs and Excise until proper invoice and accounting documentation is provided by the addressee. 10. The Canada Customs Invoice can be obtained from stationery, office supply, or printing companies. If mailers are unable to obtain the Canada Customs Invoice locally, they should visit the following Web site: www.canadapost.ca. In addition, commercial invoices are acceptable provided that each invoice has the required information for customs purposes. 11. Information on Canadian customs regulations may be obtained from the Office of International Marketing/223, Bureau of International Commerce, Department of Commerce, Washington, DC 20230, or any field office of that Department. Obtaining post code information: 12. Information on Canadian post code directories can be obtained from: (a) NATIONAL PHILATELIC CENTER CANADA POST CORPORATION STATION 1 ANTIGONISH NS B2G 2R8 Telephone: 1-800-565-4362 Fax: 1-902-863-6796 (b) To obtain Canadian post codes for specific addresses, call the General Information line at 1-416-979-8822 or access the Canada Post Corporation web site on the Internet at http://www.canadapost.ca. 13. Pursuant to the Canada Customs Act and a need to heighten border security, Canada will deny entry of all postal items (except postcards) that do not bear complete sender and addressee information in roman letters and arabic numerals. + Country Code: CA Reciprocal Service Name: There is no reciprocal service. Required Customs Form/Endorsement 1. Business papers and commercial documents. PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). 2. Merchandise samples and gift shipments (non-commercial parcels). PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). 3. Merchandise (commercial shipments) and all articles subject to customs duty. PS Form 2976-B placed inside PS Form 2976-E (plastic envelope). Notes: 1. Gift shipments (non-commercial parcels) require a sales receipt, invoice or other documentation to support the declared value. 2. Coins; banknotes; currency notes, including paper money; securities of any kind payable to bearer; traveler's checks; platinum, gold, and silver; precious stones; jewelry; watches; and other valuable articles are prohibited in Priority Mail Express International shipments to Canada. 3. Priority Mail Express International shipments may have a street address or a post office box address. 
A local telephone number for the addressee MUST be provided for shipments addressed to a post office box address. A local telephone number for the addressee should be provided if possible for shipments to a street address. Areas Served: All + No Additional Restrictions Data found. + + 0.00 + 9954 + +""" + +ShipmentCancelResponseXML = """ + EC502016316US + Cancelled + Order Cancelled Successfully + +""" diff --git a/modules/connectors/usps_international/tests/usps_international/test_tracking.py b/modules/connectors/usps_international/tests/usps_international/test_tracking.py index 9eaa86184d..b504dd92fa 100644 --- a/modules/connectors/usps_international/tests/usps_international/test_tracking.py +++ b/modules/connectors/usps_international/tests/usps_international/test_tracking.py @@ -1,225 +1,147 @@ import unittest -from unittest.mock import patch, ANY +from unittest.mock import patch from .fixture import gateway -from tests import logger - -import karrio -import karrio.lib as lib -import karrio.core.models as models +from karrio.core.utils import DP +from karrio.core.models import TrackingRequest +from karrio import Tracking class TestUSPSTracking(unittest.TestCase): def setUp(self): self.maxDiff = None - self.TrackingRequest = models.TrackingRequest(**TrackingPayload) + self.TrackingRequest = TrackingRequest(tracking_numbers=TRACKING_PAYLOAD) def test_create_tracking_request(self): request = gateway.mapper.create_tracking_request(self.TrackingRequest) - logger.debug(request.serialize()) - self.assertEqual(request.serialize(), TrackingRequest) - - def test_get_tracking(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: - mock.return_value = "{}" - karrio.Tracking.fetch(self.TrackingRequest).from_(gateway) - - self.assertEqual( - mock.call_args[1]["url"], - f"{gateway.settings.server_url}/v3/tracking/89108749065090", - ) + self.assertEqual(request.serialize(), TRACKING_REQUEST) def test_parse_tracking_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: - mock.return_value = TrackingResponse + with patch("karrio.mappers.usps_international.proxy.http") as mock: + mock.return_value = TRACKING_RESPONSE parsed_response = ( - karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() + Tracking.fetch(self.TrackingRequest).from_(gateway).parse() ) - logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual(lib.to_dict(parsed_response), ParsedTrackingResponse) - def test_parse_error_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: - mock.return_value = ErrorResponse - parsed_response = ( - karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() - ) - logger.debug(lib.to_dict(parsed_response)) - self.assertListEqual(lib.to_dict(parsed_response), ParsedErrorResponse) + self.assertListEqual(DP.to_dict(parsed_response), PARSED_TRACKING_RESPONSE) if __name__ == "__main__": unittest.main() -TrackingPayload = { - "tracking_numbers": ["89108749065090"], -} +TRACKING_PAYLOAD = ["XXXXXXXXXXXX1"] -ParsedTrackingResponse = [ +PARSED_TRACKING_RESPONSE = [ [ { "carrier_id": "usps_international", "carrier_name": "usps_international", "delivered": False, - "estimated_delivery": "2019-08-24", "events": [ { - "code": "string", - "date": "2019-08-24", - "description": "string", - "location": "string, string, string, string", - "time": "14:15 PM", - } + "code": "10", + "date": "2016-01-06", + "description": "Arrived at USPS Facility", + "location": "COLUMBUS, OH, 43218", + 
"time": "10:45 AM", + }, + { + "code": "03", + "date": "2016-01-06", + "description": "Acceptance", + "location": "LAKE CHARLES, IL, 12345", + "time": "09:10 AM", + }, ], + "tracking_number": "XXXXXXXXXX1", "info": { - "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=string", - "expected_delivery": "2019-08-24", - "shipment_destination_country": "string", - "shipment_destination_postal_code": "string", - "shipment_origin_country": "st", - "shipment_origin_postal_code": "strin", - "shipment_service": "string", + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=XXXXXXXXXX1", + "shipment_destination_postal_code": 12345, + "shipment_origin_postal_code": "12345", + "shipment_service": "First-Class Package Service - Retail", }, "status": "in_transit", - "tracking_number": "string", } ], [], ] -ParsedErrorResponse = [ - [], - [ - { - "carrier_id": "usps_international", - "carrier_name": "usps_international", - "code": "string", - "details": { - "errors": [ - { - "code": "string", - "detail": "string", - "source": {"example": "string", "parameter": "string"}, - "status": "string", - "title": "string", - } - ], - "tracking_number": "89108749065090", - }, - "message": "string", - } - ], -] - -TrackingRequest = ["89108749065090"] - -TrackingResponse = """{ - "trackingNumber": "string", - "additionalInfo": "string", - "ADPScripting": "string", - "archiveRestoreInfo": "string", - "associatedLabel": "string", - "carrierRelease": true, - "mailClass": "BOUND_PRINTED_MATTER", - "destinationCity": "string", - "destinationCountryCode": "string", - "destinationState": "st", - "destinationZIP": "string", - "editedLabelId": "string", - "emailEnabled": true, - "endOfDay": "string", - "eSOFEligible": true, - "expectedDeliveryTimeStamp": "2019-08-24T14:15:22Z", - "expectedDeliveryType": "string", - "guaranteedDeliveryTimeStamp": "2019-08-24T14:15:22Z", - "guaranteedDetails": "string", - "itemShape": "LETTER", - "kahalaIndicator": true, - "mailType": "INTERNATIONAL_INBOUND", - "approximateIntakeDate": "string", - "uniqueTrackingId": "string", - "onTime": true, - "originCity": "string", - "originCountry": "st", - "originState": "str", - "originZIP": "strin", - "proofOfDeliveryEnabled": true, - "predictedDeliveryTimeStamp": "2019-08-24T14:15:22Z", - "predictedDeliveryDate": "2019-08-24", - "predictedDeliveryWindowStartTime": "string", - "predictedDeliveryWindowEndTime": "string", - "relatedReturnReceiptID": "string", - "redeliveryEnabled": true, - "enabledNotificationRequests": { - "SMS": { - "futureDelivery": true, - "alertDelivery": true, - "todayDelivery": true, - "UP": true, - "DND": true - }, - "EMail": { - "futureDelivery": true, - "alertDelivery": true, - "todayDelivery": true, - "UP": true, - "DND": true, - "firstDisplayable": true, - "otherActivity": true - } - }, - "restoreEnabled": true, - "returnDateNotice": "2019-08-24", - "RRAMenabled": true, - "RREEnabled": true, - "services": ["string"], - "serviceTypeCode": "string", - "status": "string", - "statusCategory": "string", - "statusSummary": "Your item was delivered at 12:55 pm on April 05, 2010 in FALMOUTH, MA 02540", - "trackingProofOfDeliveryEnabled": true, - "valueofArticle": "string", - "extendRetentionPurchasedCode": "string", - "extendRetentionExtraServiceCodeOptions": [{}], - "trackingEvents": [ - { - "eventType": "string", - "eventTimestamp": "2019-08-24T14:15:22Z", - "GMTTimestamp": "2024-04-04T14:03:12.041Z", - "GMTOffset": "-7:00", - "eventCountry": "string", - "eventCity": "string", 
- "eventState": "string", - "eventZIP": "string", - "firm": "string", - "name": "string", - "authorizedAgent": true, - "eventCode": "string", - "actionCode": "string", - "reasonCode": "string" - } - ] -} +TRACKING_REQUEST = """ + 1 + 127.0.0.1 + Karrio + + """ -ErrorResponse = """{ - "apiVersion": "string", - "error": { - "code": "string", - "message": "string", - "errors": [ - { - "status": "string", - "code": "string", - "title": "string", - "detail": "string", - "source": { - "parameter": "string", - "example": "string" - } - } - ] - } -} +TRACKING_RESPONSE = """ + + + First-Class Package Service - Retail + BP + KBEA + TX + 12345 + true + false + DM + 2016-01-08 10:34:04.000000 + 412725500 + LAKE CHARLES + IL + 12345 + false + false + false + false + USPS Tracking<SUP>&#174;</SUP> + 346 + Arrived at facility + In Transit + Your item arrived at our USPS facility in COLUMBUS, OH 43218 on January 6, 2016 at 10:45 pm. The item is currently in transit to the destination. + T + true + + + true + true + true + true + true + true + true + + + true + true + true + true + true + true + true + + + + 10:45 pm + January 6, 2016 + Arrived at USPS Facility + COLUMBUS + OH + 43218 + false + 10 + + + 9:10 am + January 6, 2016 + Acceptance + LAKE CHARLES + IL + 12345 + false + 03 + + + """ diff --git a/modules/connectors/usps_rest/README.md b/modules/connectors/usps_rest/README.md new file mode 100644 index 0000000000..c8514d21aa --- /dev/null +++ b/modules/connectors/usps_rest/README.md @@ -0,0 +1,31 @@ + +# karrio.usps_rest + +This package is a USPS extension of the [karrio](https://pypi.org/project/karrio) multi carrier shipping SDK. + +## Requirements + +`Python 3.7+` + +## Installation + +```bash +pip install karrio.usps_rest +``` + +## Usage + +```python +import karrio +from karrio.mappers.usps_rest.settings import Settings + + +# Initialize a carrier gateway +usps_rest = karrio.gateway["usps_rest"].create( + Settings( + ... + ) +) +``` + +Check the [Karrio Mutli-carrier SDK docs](https://docs.karrio.io) for Shipping API requests diff --git a/modules/connectors/usps_rest/generate b/modules/connectors/usps_rest/generate new file mode 100755 index 0000000000..482e84931a --- /dev/null +++ b/modules/connectors/usps_rest/generate @@ -0,0 +1,24 @@ +SCHEMAS=./schemas +LIB_MODULES=./karrio/schemas/usps_rest +find "${LIB_MODULES}" -name "*.py" -exec rm -r {} \; +touch "${LIB_MODULES}/__init__.py" + +quicktype() { + echo "Generating $1..." 
+ docker run -it --rm --name quicktype -v $PWD:/app -e SCHEMAS=/app/schemas -e LIB_MODULES=/app/karrio/schemas/usps_rest \ + karrio/tools /quicktype/script/quicktype --no-uuids --no-date-times --no-enums --src-lang json --lang jstruct \ + --no-nice-property-names --all-properties-optional --type-as-suffix $@ +} + +quicktype --src="${SCHEMAS}/error_response.json" --out="${LIB_MODULES}/error_response.py" +quicktype --src="${SCHEMAS}/label_request.json" --out="${LIB_MODULES}/label_request.py" +quicktype --src="${SCHEMAS}/label_response.json" --out="${LIB_MODULES}/label_response.py" +quicktype --src="${SCHEMAS}/pickup_request.json" --out="${LIB_MODULES}/pickup_request.py" +quicktype --src="${SCHEMAS}/pickup_response.json" --out="${LIB_MODULES}/pickup_response.py" +quicktype --src="${SCHEMAS}/pickup_update_request.json" --out="${LIB_MODULES}/pickup_update_request.py" +quicktype --src="${SCHEMAS}/pickup_update_response.json" --out="${LIB_MODULES}/pickup_update_response.py" +quicktype --src="${SCHEMAS}/rate_request.json" --out="${LIB_MODULES}/rate_request.py" +quicktype --src="${SCHEMAS}/rate_response.json" --out="${LIB_MODULES}/rate_response.py" +quicktype --src="${SCHEMAS}/scan_form_request.json" --out="${LIB_MODULES}/scan_form_request.py" +quicktype --src="${SCHEMAS}/scan_form_response.json" --out="${LIB_MODULES}/scan_form_response.py" +quicktype --src="${SCHEMAS}/tracking_response.json" --out="${LIB_MODULES}/tracking_response.py" diff --git a/modules/connectors/usps_rest/karrio/mappers/usps_rest/__init__.py b/modules/connectors/usps_rest/karrio/mappers/usps_rest/__init__.py new file mode 100644 index 0000000000..4996ecf83c --- /dev/null +++ b/modules/connectors/usps_rest/karrio/mappers/usps_rest/__init__.py @@ -0,0 +1,22 @@ +from karrio.core.metadata import Metadata + +from karrio.mappers.usps_rest.mapper import Mapper +from karrio.mappers.usps_rest.proxy import Proxy +from karrio.mappers.usps_rest.settings import Settings +import karrio.providers.usps_rest.units as units +import karrio.providers.usps_rest.utils as utils + + +METADATA = Metadata( + id="usps_rest", + label="USPS", + # Integrations + Mapper=Mapper, + Proxy=Proxy, + Settings=Settings, + # Data Units + is_hub=False, + options=units.ShippingOption, + services=units.ShippingService, + connection_configs=utils.ConnectionConfig, +) diff --git a/modules/connectors/usps_rest/karrio/mappers/usps_rest/mapper.py b/modules/connectors/usps_rest/karrio/mappers/usps_rest/mapper.py new file mode 100644 index 0000000000..9b7be43e83 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/mappers/usps_rest/mapper.py @@ -0,0 +1,88 @@ +"""Karrio USPS client mapper.""" + +import typing +import karrio.lib as lib +import karrio.api.mapper as mapper +import karrio.core.models as models +import karrio.providers.usps_rest as provider +import karrio.mappers.usps_rest.settings as provider_settings + + +class Mapper(mapper.Mapper): + settings: provider_settings.Settings + + def create_rate_request(self, payload: models.RateRequest) -> lib.Serializable: + return provider.rate_request(payload, self.settings) + + def create_tracking_request( + self, payload: models.TrackingRequest + ) -> lib.Serializable: + return provider.tracking_request(payload, self.settings) + + def create_shipment_request( + self, payload: models.ShipmentRequest + ) -> lib.Serializable: + return provider.shipment_request(payload, self.settings) + + def create_pickup_request(self, payload: models.PickupRequest) -> lib.Serializable: + return provider.pickup_request(payload, self.settings) 
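+    # Illustrative usage sketch (an assumption added for clarity, mirroring the
+    # package README and the connector tests rather than code in this module):
+    # a gateway built from Settings drives this Mapper through karrio's fluent API.
+    #
+    #   import karrio
+    #   gateway = karrio.gateway["usps_rest"].create(
+    #       Settings(client_id="...", client_secret="...", test_mode=True)
+    #   )
+    #   shipment = karrio.Shipment.create(shipment_request).from_(gateway).parse()
+    #   trackers = karrio.Tracking.fetch(tracking_request).from_(gateway).parse()
+    #
+    # Each create_*_request() here only builds a serializable carrier payload;
+    # the Proxy sends it over HTTP, and the matching parse_*_response() maps the
+    # raw reply back into karrio models.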
+ + def create_pickup_update_request( + self, payload: models.PickupUpdateRequest + ) -> lib.Serializable: + return provider.pickup_update_request(payload, self.settings) + + def create_cancel_pickup_request( + self, payload: models.PickupCancelRequest + ) -> lib.Serializable: + return provider.pickup_cancel_request(payload, self.settings) + + def create_cancel_shipment_request( + self, payload: models.ShipmentCancelRequest + ) -> lib.Serializable[str]: + return provider.shipment_cancel_request(payload, self.settings) + + def create_manifest_request( + self, payload: models.ManifestRequest + ) -> lib.Serializable: + return provider.manifest_request(payload, self.settings) + + def parse_cancel_pickup_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + return provider.parse_pickup_cancel_response(response, self.settings) + + def parse_cancel_shipment_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + return provider.parse_shipment_cancel_response(response, self.settings) + + def parse_pickup_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: + return provider.parse_pickup_response(response, self.settings) + + def parse_pickup_update_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: + return provider.parse_pickup_update_response(response, self.settings) + + def parse_rate_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + return provider.parse_rate_response(response, self.settings) + + def parse_shipment_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + return provider.parse_shipment_response(response, self.settings) + + def parse_tracking_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: + return provider.parse_tracking_response(response, self.settings) + + def parse_manifest_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ManifestDetails, typing.List[models.Message]]: + return provider.parse_manifest_response(response, self.settings) diff --git a/modules/connectors/usps_rest/karrio/mappers/usps_rest/proxy.py b/modules/connectors/usps_rest/karrio/mappers/usps_rest/proxy.py new file mode 100644 index 0000000000..10cbab4691 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/mappers/usps_rest/proxy.py @@ -0,0 +1,151 @@ +"""Karrio USPS client proxy.""" + +import karrio.lib as lib +import karrio.api.proxy as proxy +import karrio.mappers.usps_rest.settings as provider_settings + + +class Proxy(proxy.Proxy): + settings: provider_settings.Settings + + def get_rates(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda _: lib.request( + url=f"{self.settings.server_url}/v3/total-rates/search", + data=lib.to_json(_), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ), + request.serialize(), + ) + + return lib.Deserializable(response, lambda _: [lib.to_dict(_) for _ in _]) + + def create_shipment(self, request: lib.Serializable) -> 
lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda _: lib.request( + url=f"{self.settings.server_url}/v3/label", + data=lib.to_json(_), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ), + request.serialize(), + ) + + return lib.Deserializable( + response, + lambda _: [lib.to_dict(_) for _ in _], + request.ctx, + ) + + def cancel_shipment(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda _: ( + _["trackingNumber"], + lib.request( + url=f"{self.settings.server_url}/v3/label/{_['trackingNumber']}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + on_ok=lambda _: '{"ok": true}', + ), + ), + request.serialize(), + ) + + return lib.Deserializable( + response, + lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], + ) + + def get_tracking(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda trackingNumber: ( + trackingNumber, + lib.request( + url=f"{self.settings.server_url}/v3/tracking/{trackingNumber}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ), + ), + request.serialize(), + ) + + return lib.Deserializable( + response, + lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], + ) + + def schedule_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/carrier-pickup", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ) + + return lib.Deserializable(response, lib.to_dict) + + def modify_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/carrier-pickup/{request.ctx['confirmationNumber']}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ) + + return lib.Deserializable(response, lib.to_dict) + + def cancel_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/carrier-pickup/{request.serialize()['confirmationNumber']}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + on_ok=lambda _: '{"ok": true}', + ) + + return lib.Deserializable(response, lib.to_dict) + + def create_manifest(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/scan-form", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ) + + return lib.Deserializable(response, lib.to_dict) diff --git a/modules/connectors/usps_rest/karrio/mappers/usps_rest/settings.py 
b/modules/connectors/usps_rest/karrio/mappers/usps_rest/settings.py new file mode 100644 index 0000000000..7998573a86 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/mappers/usps_rest/settings.py @@ -0,0 +1,23 @@ +"""Karrio USPS client settings.""" + +import attr +import karrio.providers.usps_rest.utils as provider_utils + + +@attr.s(auto_attribs=True) +class Settings(provider_utils.Settings): + """USPS connection settings.""" + + # Add carrier specific API connection properties here + client_id: str + client_secret: str + account_type: str = None + account_number: str = None + + # generic properties + id: str = None + test_mode: bool = False + carrier_id: str = "usps_rest" + account_country_code: str = "US" + metadata: dict = {} + config: dict = {} diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/__init__.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/__init__.py new file mode 100644 index 0000000000..09ce39b2d0 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/__init__.py @@ -0,0 +1,26 @@ +"""Karrio USPS provider imports.""" + +from karrio.providers.usps_rest.utils import Settings +from karrio.providers.usps_rest.rate import parse_rate_response, rate_request +from karrio.providers.usps_rest.shipment import ( + parse_shipment_cancel_response, + parse_shipment_response, + shipment_cancel_request, + shipment_request, +) +from karrio.providers.usps_rest.pickup import ( + parse_pickup_cancel_response, + parse_pickup_update_response, + parse_pickup_response, + pickup_update_request, + pickup_cancel_request, + pickup_request, +) +from karrio.providers.usps_rest.tracking import ( + parse_tracking_response, + tracking_request, +) +from karrio.providers.usps_rest.manifest import ( + parse_manifest_response, + manifest_request, +) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/error.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/error.py new file mode 100644 index 0000000000..4348736305 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/error.py @@ -0,0 +1,26 @@ +"""Karrio USPS error parser.""" + +import typing +import karrio.lib as lib +import karrio.core.models as models +import karrio.providers.usps_rest.utils as provider_utils + + +def parse_error_response( + response: typing.Union[dict, typing.List[dict]], + settings: provider_utils.Settings, + **kwargs, +) -> typing.List[models.Message]: + responses = response if isinstance(response, list) else [response] + errors: list = [response["error"] for response in responses if "error" in response] + + return [ + models.Message( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + code=error.get("code"), + message=error.get("message"), + details={**kwargs, "errors": error.get("errors", [])}, + ) + for error in errors + ] diff --git a/modules/connectors/usps_international/karrio/providers/usps_international/manifest.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/manifest.py similarity index 85% rename from modules/connectors/usps_international/karrio/providers/usps_international/manifest.py rename to modules/connectors/usps_rest/karrio/providers/usps_rest/manifest.py index b08ed7f9d8..716510270a 100644 --- a/modules/connectors/usps_international/karrio/providers/usps_international/manifest.py +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/manifest.py @@ -1,15 +1,15 @@ """Karrio USPS manifest API implementation.""" -import karrio.schemas.usps_international.scan_form_request as 
usps -import karrio.schemas.usps_international.scan_form_response as manifest +import karrio.schemas.usps_rest.scan_form_request as usps_rest +import karrio.schemas.usps_rest.scan_form_response as manifest import time import typing import karrio.lib as lib import karrio.core.models as models -import karrio.providers.usps_international.error as error -import karrio.providers.usps_international.utils as provider_utils -import karrio.providers.usps_international.units as provider_units +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units def parse_manifest_response( @@ -66,8 +66,8 @@ def manifest_request( ), ) - # map data to convert karrio model to usps specific type - request = usps.ScanFormRequestType( + # map data to convert karrio model to usps_rest specific type + request = usps_rest.ScanFormRequestType( form="5630", imageType="PDF", labelType="8.5x11LABEL", @@ -77,10 +77,10 @@ def manifest_request( destinationEntryFacilityType=lib.identity( options.usps_destination_entry_facility_type.state or "NONE" ), - shipment=usps.ShipmentType( + shipment=usps_rest.ShipmentType( trackingNumbers=payload.shipment_identifiers, ), - fromAddress=usps.FromAddressType( + fromAddress=usps_rest.FromAddressType( ignoreBadAddress=options.usps_ignore_bad_address.state or False, streetAddress=address.address_line1, secondaryAddress=address.address_line2, diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/__init__.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/__init__.py new file mode 100644 index 0000000000..3857721ca0 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/__init__.py @@ -0,0 +1,12 @@ +from karrio.providers.usps_rest.pickup.create import ( + parse_pickup_response, + pickup_request, +) +from karrio.providers.usps_rest.pickup.update import ( + parse_pickup_update_response, + pickup_update_request, +) +from karrio.providers.usps_rest.pickup.cancel import ( + parse_pickup_cancel_response, + pickup_cancel_request, +) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/cancel.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/cancel.py new file mode 100644 index 0000000000..4c20281eb9 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/cancel.py @@ -0,0 +1,40 @@ +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units + + +def parse_pickup_cancel_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + response = _response.deserialize() + messages = error.parse_error_response(response, settings) + success = response.get("ok") == True + + confirmation = ( + models.ConfirmationDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + operation="Cancel Pickup", + success=success, + ) + if success + else None + ) + + return confirmation, messages + + +def pickup_cancel_request( + payload: models.PickupCancelRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + + # map data to convert karrio model to usps_rest specific type + request = 
dict(confirmationNumber=payload.confirmation_number) + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/create.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/create.py new file mode 100644 index 0000000000..69b772b335 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/create.py @@ -0,0 +1,102 @@ +"""Karrio USPS schedule pickup implementation.""" + +import karrio.schemas.usps_rest.pickup_request as usps_rest +import karrio.schemas.usps_rest.pickup_response as pickup + +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units + + +def parse_pickup_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + response = _response.deserialize() + + messages = error.parse_error_response(response, settings) + pickup = ( + _extract_details(response, settings) + if "confirmationNumber" in response + else None + ) + + return pickup, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.PickupDetails: + details = lib.to_object(pickup.PickupResponseType, data) + + return models.PickupDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + confirmation_number=details.confirmationNumber, + pickup_date=lib.fdate(details.pickupDate), + ) + + +def pickup_request( + payload: models.PickupRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + address = lib.to_address(payload.address) + packages = lib.to_packages(payload.parcels) + options = lib.units.Options( + payload.options, + option_type=lib.units.create_enum( + "PickupOptions", + # fmt: off + { + "usps_package_type": lib.OptionEnum("usps_package_type"), + }, + # fmt: on + ), + ) + + # map data to convert karrio model to usps_rest specific type + request = usps_rest.PickupRequestType( + pickupDate=lib.fdate(payload.pickup_date), + pickupAddress=usps_rest.PickupAddressType( + firstName=address.person_name, + lastName=None, + firm=address.company_name, + address=usps_rest.AddressType( + streetAddress=address.address_line1, + secondaryAddress=address.address_line2, + city=address.city, + state=address.state, + ZIPCode=lib.to_zip5(address.postal_code), + ZIPPlus4=lib.to_zip4(address.postal_code) or "", + urbanization=None, + ), + contact=[ + usps_rest.ContactType(email=address.email) + for _ in [address.email] + if _ is not None + ], + ), + packages=[ + usps_rest.PackageType( + packageType=options.usps_package_type.state or "OTHER", + packageCount=len(packages), + ) + ], + estimatedWeight=packages.weight.LB, + pickupLocation=lib.identity( + usps_rest.PickupLocationType( + packageLocation=payload.package_location, + specialInstructions=payload.instruction, + ) + if any([payload.package_location, payload.instruction]) + else None + ), + ) + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/update.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/update.py new file mode 100644 index 0000000000..47187cc359 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/pickup/update.py @@ -0,0 +1,109 @@ +"""Karrio USPS 
update pickup implementation.""" + +import karrio.schemas.usps_rest.pickup_update_request as usps_rest +import karrio.schemas.usps_rest.pickup_update_response as pickup + +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units + + +def parse_pickup_update_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + response = _response.deserialize() + + messages = error.parse_error_response(response, settings) + pickup = ( + _extract_details(response, settings) + if "confirmationNumber" in response + else None + ) + + return pickup, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.PickupDetails: + details = lib.to_object(pickup.PickupUpdateResponseType, data) + + return models.PickupDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + confirmation_number=details.confirmationNumber, + pickup_date=lib.fdate(details.pickupDate), + ) + + +def pickup_update_request( + payload: models.PickupUpdateRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + address = lib.to_address(payload.address) + packages = lib.to_packages(payload.parcels) + options = lib.units.Options( + payload.options, + option_type=lib.units.create_enum( + "PickupOptions", + # fmt: off + { + "usps_package_type": lib.OptionEnum("usps_package_type"), + }, + # fmt: on + ), + ) + + # map data to convert karrio model to usps_rest specific type + request = usps_rest.PickupUpdateRequestType( + pickupDate=lib.fdate(payload.pickup_date), + carrierPickupRequest=usps_rest.CarrierPickupRequestType( + pickupDate=lib.fdate(payload.pickup_date), + pickupAddress=usps_rest.PickupAddressType( + firstName=address.person_name, + lastName=None, + firm=address.company_name, + address=usps_rest.AddressType( + streetAddress=address.address_line1, + secondaryAddress=address.address_line2, + city=address.city, + state=address.state, + ZIPCode=lib.to_zip5(address.postal_code), + ZIPPlus4=lib.to_zip4(address.postal_code) or "", + urbanization=None, + ), + contact=[ + usps_rest.ContactType(email=address.email) + for _ in [address.email] + if _ is not None + ], + ), + packages=[ + usps_rest.PackageType( + packageType=options.usps_package_type.state or "OTHER", + packageCount=len(packages), + ) + ], + estimatedWeight=packages.weight.LB, + pickupLocation=lib.identity( + usps_rest.PickupLocationType( + packageLocation=payload.package_location, + specialInstructions=payload.instruction, + ) + if any([payload.package_location, payload.instruction]) + else None + ), + ), + ) + + return lib.Serializable( + request, + lib.to_dict, + dict(confirmationNumber=payload.confirmation_number), + ) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/rate.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/rate.py new file mode 100644 index 0000000000..c70a7c6635 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/rate.py @@ -0,0 +1,135 @@ +"""Karrio USPS rating API implementation.""" + +import karrio.schemas.usps_rest.rate_request as usps_rest +import karrio.schemas.usps_rest.rate_response as rating + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import 
karrio.core.models as models +import karrio.core.errors as errors +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units + + +def parse_rate_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + responses = _response.deserialize() + + messages = error.parse_error_response(responses, settings) + rates = lib.to_multi_piece_rates( + [ + ( + f"{_}", + [_extract_details(rate, settings) for rate in response["rateOptions"]], + ) + for _, response in enumerate(responses, start=1) + if response.get("rateOptions") is not None + ] + ) + + return rates, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.RateDetails: + rate = lib.to_object(rating.RateOptionType, data) + mail_class = rate.rates[0].mailClass + service = provider_units.ShippingService.map(mail_class) + charges = [ + ("Base Charge", lib.to_money(rate.totalBasePrice)), + *[(_.description, lib.to_money(_.price)) for _ in rate.rates], + *[(_.name, lib.to_money(_.price)) for _ in rate.extraServices], + ] + + return models.RateDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + service=service.name_or_key, + total_charge=lib.to_money(rate.totalPrice), + currency="USD", + extra_charges=[ + models.ChargeDetails(name=name, currency="USD", amount=amount) + for name, amount in charges + ], + meta=dict( + service_name=service.name or mail_class, + zone=lib.failsafe(lambda: rate.rates[0].zone), + ), + ) + + +def rate_request( + payload: models.RateRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + + if ( + shipper.country_code is not None + and shipper.country_code != units.Country.US.name + ): + raise errors.OriginNotServicedError(shipper.country_code) + + if ( + recipient.country_code is not None + and recipient.country_code != units.Country.US.name + ): + raise errors.DestinationNotServicedError(recipient.country_code) + + services = lib.to_services(payload.services, provider_units.ShippingService) + options = lib.to_shipping_options( + payload.options, + initializer=provider_units.shipping_options_initializer, + ) + packages = lib.to_packages( + payload.parcels, + options=options, + package_option_type=provider_units.ShippingOption, + shipping_options_initializer=provider_units.shipping_options_initializer, + ) + + # map data to convert karrio model to usps_rest specific type + request = [ + usps_rest.RateRequestType( + originZIPCode=shipper.postal_code, + destinationZIPCode=recipient.postal_code, + weight=package.weight.LB, + length=package.length.IN, + width=package.width.IN, + height=package.height.IN, + # mailClass=None, + mailClasses=[ + service.value + for service in ( + services + if any(services) + else [provider_units.ShippingService.usps_all] + ) + ], + priceType=options.usps_price_type.state or "RETAIL", + mailingDate=lib.fdate( + package.options.shipment_date.state or time.strftime("%Y-%m-%d") + ), + accountType=settings.account_type or "EPS", + accountNumber=settings.account_number, + itemValue=lib.identity( + package.items.value_amount if len(package.items) > 0 else None + ), + extraServices=[ + lib.to_int(_.code) + for __, _ in options.items() + if __ not in provider_units.CUSTOM_OPTIONS + ], + ) + for package in 
packages + ] + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/__init__.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/__init__.py new file mode 100644 index 0000000000..a30975e2d1 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/__init__.py @@ -0,0 +1,8 @@ +from karrio.providers.usps_rest.shipment.create import ( + parse_shipment_response, + shipment_request, +) +from karrio.providers.usps_rest.shipment.cancel import ( + parse_shipment_cancel_response, + shipment_cancel_request, +) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/cancel.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/cancel.py new file mode 100644 index 0000000000..b7d6023999 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/cancel.py @@ -0,0 +1,53 @@ +import typing +import karrio.lib as lib +import karrio.core.models as models +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units + + +def parse_shipment_cancel_response( + _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], + settings: provider_utils.Settings, +) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + responses = _response.deserialize() + messages: typing.List[models.Message] = sum( + [ + error.parse_error_response(response, settings, tracking_number=_) + for _, response in responses + ], + start=[], + ) + success = all([_["ok"] for __, _ in responses]) + + confirmation = ( + models.ConfirmationDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + operation="Cancel Shipment", + success=success, + ) + if success + else None + ) + + return confirmation, messages + + +def shipment_cancel_request( + payload: models.ShipmentCancelRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + + # map data to convert karrio model to usps_rest specific type + request = [ + dict(trackingNumber=_) + for _ in set( + [ + payload.shipment_identifier, + *((payload.options or {}).get("shipment_identifiers") or []), + ] + ) + ] + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/create.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/create.py new file mode 100644 index 0000000000..bb47e0e660 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/shipment/create.py @@ -0,0 +1,263 @@ +"""Karrio USPS create label implementation.""" + +import karrio.schemas.usps_rest.label_request as usps_rest +import karrio.schemas.usps_rest.label_response as shipping + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.core.errors as errors +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units + + +def parse_shipment_response( + _response: lib.Deserializable[typing.List[dict]], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + responses = _response.deserialize() + + shipment = lib.to_multi_piece_shipment( + [ + ( + f"{_}", + _extract_details(response, settings, _response.ctx), + ) + for _, response 
in enumerate(responses, start=1) + if response.get("error") is None + ] + ) + messages: typing.List[models.Message] = sum( + [error.parse_error_response(response, settings) for response in responses], + start=[], + ) + + return shipment, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, + ctx: dict = None, +) -> models.ShipmentDetails: + details = lib.to_object(shipping.LabelResponseType, data) + label = details.labelImage + invoice = details.receiptImage + label_type = ctx.get("label_type", "PDF") + + return models.ShipmentDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + tracking_number=details.labelMetadata.trackingNumber, + shipment_identifier=details.labelMetadata.trackingNumber, + label_type=label_type, + docs=models.Documents(label=label, invoice=invoice), + meta=dict( + SKU=details.labelMetadata.SKU, + postage=details.labelMetadata.postage, + routingInformation=details.labelMetadata.routingInformation, + labelBrokerID=details.labelMetadata.labelBrokerID, + ), + ) + + +def shipment_request( + payload: models.ShipmentRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + + if ( + shipper.country_code is not None + and shipper.country_code != units.Country.US.name + ): + raise errors.OriginNotServicedError(shipper.country_code) + + if ( + recipient.country_code is not None + and recipient.country_code != units.Country.US.name + ): + raise errors.DestinationNotServicedError(recipient.country_code) + + return_address = lib.to_address(payload.return_address) + service = provider_units.ShippingService.map(payload.service).value_or_key + options = lib.to_shipping_options( + payload.options, + initializer=provider_units.shipping_options_initializer, + ) + packages = lib.to_packages( + payload.parcels, + options=options, + package_option_type=provider_units.ShippingOption, + shipping_options_initializer=provider_units.shipping_options_initializer, + ) + pickup_location = lib.to_address(options.hold_for_pickup_address.state) + label_type = provider_units.LabelType.map(payload.label_type).value or "PDF" + + # map data to convert karrio model to usps_rest specific type + request = [ + usps_rest.LabelRequestType( + imageInfo=usps_rest.ImageInfoType( + imageType=label_type, + labelType="4X6LABEL", + # shipInfo=None, + receiptOption="SEPARATE_PAGE", + suppressPostage=None, + suppressMailDate=None, + returnLabel=None, + ), + toAddress=usps_rest.AddressType( + streetAddress=recipient.address_line1, + secondaryAddress=recipient.address_line2, + city=recipient.city, + state=recipient.state, + ZIPCode=lib.to_zip5(recipient.postal_code) or "", + ZIPPlus4=lib.to_zip4(recipient.postal_code) or "", + urbanization=None, + firstName=recipient.person_name, + lastName=None, + firm=recipient.company_name, + phone=recipient.phone_number, + email=recipient.email, + ignoreBadAddress=True, + platformUserId=None, + parcelLockerDelivery=None, + holdForPickup=package.options.usps_hold_for_pickup.state, + facilityId=package.options.usps_facility_id.state, + ), + fromAddress=usps_rest.AddressType( + streetAddress=shipper.address_line1, + secondaryAddress=shipper.address_line2, + city=shipper.city, + state=shipper.state, + ZIPCode=lib.to_zip4(shipper.postal_code) or "", + ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", + urbanization=None, + firstName=shipper.person_name, + lastName=None, + firm=shipper.company_name, + 
phone=shipper.phone_number, + email=shipper.email, + ignoreBadAddress=True, + platformUserId=None, + parcelLockerDelivery=None, + holdForPickup=None, + facilityId=None, + ), + senderAddress=usps_rest.AddressType( + streetAddress=shipper.address_line1, + secondaryAddress=shipper.address_line2, + city=shipper.city, + state=shipper.state, + ZIPCode=lib.to_zip4(shipper.postal_code) or "", + ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", + urbanization=None, + firstName=shipper.person_name, + lastName=None, + firm=shipper.company_name, + phone=shipper.phone_number, + email=shipper.email, + ignoreBadAddress=True, + platformUserId=None, + parcelLockerDelivery=None, + holdForPickup=None, + facilityId=None, + ), + returnAddress=lib.identity( + usps_rest.AddressType( + streetAddress=return_address.address_line1, + secondaryAddress=return_address.address_line2, + city=return_address.city, + state=return_address.state, + ZIPCode=lib.to_zip4(return_address.postal_code) or "", + ZIPPlus4=lib.to_zip5(return_address.postal_code) or "", + urbanization=None, + firstName=return_address.person_name, + lastName=None, + firm=return_address.company_name, + phone=return_address.phone_number, + email=return_address.email, + ignoreBadAddress=True, + platformUserId=None, + parcelLockerDelivery=None, + holdForPickup=None, + facilityId=None, + ) + if payload.return_address is not None + else None + ), + packageDescription=usps_rest.PackageDescriptionType( + weightUOM="lb", + weight=package.weight.LB, + dimensionsUOM="in", + length=package.length.IN, + height=package.height.IN, + width=package.width.IN, + girth=package.girth.value, + mailClass=service, + rateIndicator=package.options.usps_rate_indicator.state or "SP", + processingCategory=lib.identity( + package.options.usps_processing_category.state or "NON_MACHINABLE" + ), + destinationEntryFacilityType=lib.identity( + package.options.usps_destination_facility_type.state or "NONE" + ), + destinationEntryFacilityAddress=lib.identity( + usps_rest.DestinationEntryFacilityAddressType( + streetAddress=pickup_location.address_line1, + secondaryAddress=pickup_location.address_line2, + city=pickup_location.city, + state=pickup_location.state, + ZIPCode=lib.to_zip4(pickup_location.postal_code) or "", + ZIPPlus4=lib.to_zip5(pickup_location.postal_code) or "", + urbanization=None, + ) + if package.options.hold_for_pickup_address.state is not None + else None + ), + packageOptions=lib.identity( + usps_rest.PackageOptionsType( + packageValue=package.total_value, + nonDeliveryOption=None, + redirectAddress=None, + contentType=None, + generateGXEvent=None, + containers=[], + ancillaryServiceEndorsements=None, + originalPackage=None, + ) + if (package.total_value or 0.0) > 0.0 + else None + ), + customerReference=[ + usps_rest.CustomerReferenceType( + referenceNumber=reference, + printReferenceNumber=True, + ) + for reference in [payload.reference] + if reference is not None + ], + extraServices=[ + lib.to_int(_.code) + for __, _ in package.options.items() + if _.name not in provider_units.CUSTOM_OPTIONS + ], + mailingDate=lib.fdate( + package.options.shipment_date.state or time.strftime("%Y-%m-%d") + ), + carrierRelease=package.options.usps_carrier_release.state, + physicalSignatureRequired=package.options.usps_physical_signature_required.state, + inductionZIPCode=lib.identity( + return_address.postal_code or shipper.postal_code + ), + ), + customsForm=None, + ) + for package in packages + ] + + return lib.Serializable(request, lib.to_dict, dict(label_type=label_type)) diff
--git a/modules/connectors/usps_rest/karrio/providers/usps_rest/tracking.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/tracking.py new file mode 100644 index 0000000000..96503f3ef2 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/tracking.py @@ -0,0 +1,99 @@ +"""Karrio USPS rating API implementation.""" + +# import karrio.schemas.usps_rest.tracking_request as usps_rest +import karrio.schemas.usps_rest.tracking_response as tracking + +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_rest.error as error +import karrio.providers.usps_rest.utils as provider_utils +import karrio.providers.usps_rest.units as provider_units + + +def parse_tracking_response( + _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: + responses = _response.deserialize() + + messages: typing.List[models.Message] = sum( + [ + error.parse_error_response(response, settings, tracking_number=_) + for _, response in responses + ], + start=[], + ) + tracking_details = [ + _extract_details(details, settings) + for _, details in responses + if "error" not in details + ] + + return tracking_details, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.TrackingDetails: + details = lib.to_object(tracking.TrackingResponseType, data) + status = next( + ( + status.name + for status in list(provider_units.TrackingStatus) + if getattr(details, "status", None) in status.value + ), + provider_units.TrackingStatus.in_transit.name, + ) + + return models.TrackingDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + tracking_number=details.trackingNumber, + events=[ + models.TrackingEvent( + date=lib.fdate(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), + description=event.name, + code=event.eventType, + time=lib.flocaltime(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), + location=lib.text( + event.eventCity, + event.eventZIP, + event.eventState, + event.eventCountry, + separator=", ", + ), + ) + for event in details.trackingEvents + ], + estimated_delivery=lib.fdate( + details.expectedDeliveryTimeStamp, + "%Y-%m-%dT%H:%M:%SZ", + ), + delivered=status == "delivered", + status=status, + info=models.TrackingInfo( + # fmt: off + carrier_tracking_link=settings.tracking_url.format(details.trackingNumber), + expected_delivery=lib.fdate(details.expectedDeliveryTimeStamp, "%Y-%m-%dT%H:%M:%SZ"), + shipment_service=provider_units.ShippingService.map(details.serviceTypeCode).name_or_key, + shipment_origin_country=details.originCountry, + shipment_origin_postal_code=details.originZIP, + shipment_destination_country=details.destinationCountryCode, + shipment_destination_postal_code=details.destinationZIP, + # fmt: on + ), + ) + + +def tracking_request( + payload: models.TrackingRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + + # map data to convert karrio model to usps_rest specific type + request = payload.tracking_numbers + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/units.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/units.py new file mode 100644 index 0000000000..230506199c --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/units.py @@ -0,0 +1,198 @@ +import karrio.lib as lib 
+import karrio.core.units as units + + +class PackagingType(lib.StrEnum): + """Carrier specific packaging type""" + + PACKAGE = "PACKAGE" + + """ Unified Packaging type mapping """ + envelope = PACKAGE + pak = PACKAGE + tube = PACKAGE + pallet = PACKAGE + small_box = PACKAGE + medium_box = PACKAGE + your_packaging = PACKAGE + + +class ContentType(lib.StrEnum): + HAZMAT = "HAZMAT" + CREMATED_REMAINS = "CREMATED_REMAINS" + BEES = "BEES" + DAY_OLD_POULTRY = "DAY_OLD_POULTRY" + ADULT_BIRDS = "ADULT_BIRDS" + OTHER_LIVES = "OTHER_LIVES" + PERISHABLE = "PERISHABLE" + PHARMACEUTICALS = "PHARMACEUTICALS" + MEDICAL_SUPPLIES = "MEDICAL_SUPPLIES" + FRUITS = "FRUITS" + VEGETABLES = "VEGETABLES" + LIVE_PLANTS = "LIVE_PLANTS" + + +class LabelType(lib.StrEnum): + """Carrier specific label type""" + + PDF = "PDF" + TIFF = "TIFF" + JPG = "JPG" + SVG = "SVG" + ZPL203DPI = "ZPL203DPI" + ZPL300DPI = "ZPL300DPI" + LABEL_BROKER = "LABEL_BROKER" + NONE = "NONE" + + """ Unified Label type mapping """ + ZPL = ZPL300DPI + PNG = JPG + + +class ShippingService(lib.StrEnum): + """Carrier specific services""" + + usps_standard_service = "USPS Standard Service" + usps_parcel_select = "PARCEL_SELECT" + usps_parcel_select_lightweight = "PARCEL_SELECT_LIGHTWEIGHT" + usps_priority_mail_express = "PRIORITY_MAIL_EXPRESS" + usps_priority_mail = "PRIORITY_MAIL" + usps_first_class_package_service = "FIRST-CLASS_PACKAGE_SERVICE" + usps_library_mail = "LIBRARY_MAIL" + usps_media_mail = "MEDIA_MAIL" + usps_bound_printed_matter = "BOUND_PRINTED_MATTER" + usps_connect_local = "USPS_CONNECT_LOCAL" + usps_connect_mail = "USPS_CONNECT_MAIL" + usps_connect_next_day = "USPS_CONNECT_NEXT_DAY" + usps_connect_regional = "USPS_CONNECT_REGIONAL" + usps_connect_same_day = "USPS_CONNECT_SAME_DAY" + usps_ground_advantage = "USPS_GROUND_ADVANTAGE" + usps_retail_ground = "USPS_RETAIL_GROUND" + usps_all = "ALL" + + +class ShippingOption(lib.Enum): + """Carrier specific options""" + + # fmt: off + usps_label_delivery_service = lib.OptionEnum("415", bool) + usps_tracking_plus_6_months = lib.OptionEnum("480", bool) + usps_tracking_plus_1_year = lib.OptionEnum("481", bool) + usps_tracking_plus_3_years = lib.OptionEnum("482", bool) + usps_tracking_plus_5_years = lib.OptionEnum("483", bool) + usps_tracking_plus_7_years = lib.OptionEnum("484", bool) + usps_tracking_plus_10_years = lib.OptionEnum("485", bool) + usps_tracking_plus_signature_3_years = lib.OptionEnum("486", bool) + usps_tracking_plus_signature_5_years = lib.OptionEnum("487", bool) + usps_tracking_plus_signature_7_years = lib.OptionEnum("488", bool) + usps_tracking_plus_signature_10_years = lib.OptionEnum("489", bool) + usps_hazardous_materials_air_eligible_ethanol = lib.OptionEnum("810", bool) + usps_hazardous_materials_class_1_toy_propellant_safety_fuse_package = lib.OptionEnum("811", bool) + usps_hazardous_materials_class_3_flammable_and_combustible_liquids = lib.OptionEnum("812", bool) + usps_hazardous_materials_class_7_radioactive_materials = lib.OptionEnum("813", bool) + usps_hazardous_materials_class_8_air_eligible_corrosive_materials = lib.OptionEnum("814", bool) + usps_hazardous_materials_class_8_nonspillable_wet_batteries = lib.OptionEnum("815", bool) + usps_hazardous_materials_class_9_lithium_battery_marked_ground_only = lib.OptionEnum("816", bool) + usps_hazardous_materials_class_9_lithium_battery_returns = lib.OptionEnum("817", bool) + usps_hazardous_materials_class_9_marked_lithium_batteries = lib.OptionEnum("818", bool) + usps_hazardous_materials_class_9_dry_ice = 
lib.OptionEnum("819", bool) + usps_hazardous_materials_class_9_unmarked_lithium_batteries = lib.OptionEnum("820", bool) + usps_hazardous_materials_class_9_magnetized_materials = lib.OptionEnum("821", bool) + usps_hazardous_materials_division_4_1_mailable_flammable_solids_and_safety_matches = lib.OptionEnum("822", bool) + usps_hazardous_materials_division_5_1_oxidizers = lib.OptionEnum("823", bool) + usps_hazardous_materials_division_5_2_organic_peroxides = lib.OptionEnum("824", bool) + usps_hazardous_materials_division_6_1_toxic_materials = lib.OptionEnum("825", bool) + usps_hazardous_materials_division_6_2_biological_materials = lib.OptionEnum("826", bool) + usps_hazardous_materials_excepted_quantity_provision = lib.OptionEnum("827", bool) + usps_hazardous_materials_ground_only_hazardous_materials = lib.OptionEnum("828", bool) + usps_hazardous_materials_air_eligible_id8000_consumer_commodity = lib.OptionEnum("829", bool) + usps_hazardous_materials_lighters = lib.OptionEnum("830", bool) + usps_hazardous_materials_limited_quantity_ground = lib.OptionEnum("831", bool) + usps_hazardous_materials_small_quantity_provision_markings_required = lib.OptionEnum("832", bool) + usps_hazardous_materials = lib.OptionEnum("857", bool) + usps_certified_mail = lib.OptionEnum("910", bool) + usps_certified_mail_restricted_delivery = lib.OptionEnum("911", bool) + usps_certified_mail_adult_signature_required = lib.OptionEnum("912", bool) + usps_certified_mail_adult_signature_restricted_delivery = lib.OptionEnum("913", bool) + usps_collect_on_delivery = lib.OptionEnum("915", float) + usps_collect_on_delivery_restricted_delivery = lib.OptionEnum("917", bool) + usps_tracking_electronic = lib.OptionEnum("920", bool) + usps_signature_confirmation = lib.OptionEnum("921", bool) + usps_adult_signature_required = lib.OptionEnum("922", bool) + usps_adult_signature_restricted_delivery = lib.OptionEnum("923", bool) + usps_signature_confirmation_restricted_delivery = lib.OptionEnum("924", bool) + usps_priority_mail_express_merchandise_insurance = lib.OptionEnum("925", bool) + usps_insurance_bellow_500 = lib.OptionEnum("930", float) + usps_insurance_above_500 = lib.OptionEnum("931", float) + usps_insurance_restricted_delivery = lib.OptionEnum("934", bool) + usps_registered_mail = lib.OptionEnum("940", bool) + usps_registered_mail_restricted_delivery = lib.OptionEnum("941", bool) + usps_return_receipt = lib.OptionEnum("955", bool) + usps_return_receipt_electronic = lib.OptionEnum("957", bool) + usps_signature_requested_priority_mail_express_only = lib.OptionEnum("981", bool) + usps_parcel_locker_delivery = lib.OptionEnum("984", bool) + usps_po_to_addressee_priority_mail_express_only = lib.OptionEnum("986", bool) + usps_sunday_delivery = lib.OptionEnum("981", bool) + # fmt: on + + """ Custom Options """ + usps_price_type = lib.OptionEnum("priceType") + usps_facility_id = lib.OptionEnum("facilityId") + usps_hold_for_pickup = lib.OptionEnum("holdForPickup", bool) + usps_rate_indicator = lib.OptionEnum("rateIndicator") + usps_processing_category = lib.OptionEnum("processingCategory") + usps_carrier_release = lib.OptionEnum("carrierRelease", bool) + usps_physical_signature_required = lib.OptionEnum("physicalSignatureRequired", bool) + usps_restriction_type = lib.OptionEnum("restrictionType") + + """ Unified Option type mapping """ + cash_on_delivery = usps_collect_on_delivery + signature_confirmation = usps_signature_confirmation + sunday_delivery = usps_sunday_delivery + hold_at_location = usps_hold_for_pickup + + 
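+# The option keys below are consumed as dedicated request fields (e.g. priceType, +# facilityId, holdForPickup, rateIndicator) rather than numeric extra service codes, +# so they are excluded from the extraServices lists built for rate and shipment requests.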
+CUSTOM_OPTIONS = [ + ShippingOption.usps_price_type.name, + ShippingOption.usps_facility_id.name, + ShippingOption.usps_hold_for_pickup.name, + ShippingOption.usps_rate_indicator.name, + ShippingOption.usps_processing_category.name, + ShippingOption.usps_carrier_release.name, + ShippingOption.usps_physical_signature_required.name, +] + + +def shipping_options_initializer( + options: dict, + package_options: units.ShippingOptions = None, +) -> units.ShippingOptions: + """ + Apply default values to the given options. + """ + + if package_options is not None: + options.update(package_options.content) + + if "insurance" in options: + if lib.to_money(options["insurance"]) > 500: + options[ShippingOption.usps_insurance_above_500.name] = options["insurance"] + else: + options[ShippingOption.usps_insurance_bellow_500.name] = options[ + "insurance" + ] + + def items_filter(key: str) -> bool: + return key in ShippingOption # type: ignore + + return units.ShippingOptions(options, ShippingOption, items_filter=items_filter) + + +class TrackingStatus(lib.Enum): + on_hold = ["on_hold"] + delivered = ["delivered"] + in_transit = ["in_transit"] + delivery_failed = ["delivery_failed"] + delivery_delayed = ["delivery_delayed"] + out_for_delivery = ["out_for_delivery"] + ready_for_pickup = ["ready_for_pickup"] diff --git a/modules/connectors/usps_rest/karrio/providers/usps_rest/utils.py b/modules/connectors/usps_rest/karrio/providers/usps_rest/utils.py new file mode 100644 index 0000000000..eb68967ce5 --- /dev/null +++ b/modules/connectors/usps_rest/karrio/providers/usps_rest/utils.py @@ -0,0 +1,87 @@ +import datetime +import karrio.lib as lib +import karrio.core as core +import karrio.core.errors as errors + + +class Settings(core.Settings): + """USPS connection settings.""" + + # Add carrier specific api connection properties here + client_id: str + client_secret: str + account_type: str = None + account_number: str = None + + @property + def carrier_name(self): + return "usps_rest" + + @property + def server_url(self): + return "https://api.usps.com" + + @property + def tracking_url(self): + return "https://tools.usps.com/go/TrackConfirmAction?tLabels={}" + + @property + def connection_config(self) -> lib.units.Options: + return lib.to_connection_config( + self.config or {}, + option_type=ConnectionConfig, + ) + + @property + def access_token(self): + """Retrieve the access_token using the client_id|client_secret pair + or collect it from the cache if an unexpired access_token exist. 
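+ Tokens are refreshed when the cached value is due to expire within the next 30 minutes.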
+ """ + cache_key = f"{self.carrier_name}|{self.client_id}|{self.client_secret}" + now = datetime.datetime.now() + datetime.timedelta(minutes=30) + + auth = self.connection_cache.get(cache_key) or {} + token = auth.get("access_token") + expiry = lib.to_date(auth.get("expiry"), current_format="%Y-%m-%d %H:%M:%S") + + if token is not None and expiry is not None and expiry > now: + return token + + self.connection_cache.set(cache_key, lambda: login(self)) + new_auth = self.connection_cache.get(cache_key) + + return new_auth["access_token"] + + +def login(settings: Settings, client_id: str = None, client_secret: str = None): + import karrio.providers.usps_rest.error as error + + result = lib.request( + url=f"{settings.server_url}/oauth2/v3/token", + method="POST", + headers={"Content-Type": "application/x-www-form-urlencoded"}, + data=lib.to_query_string( + dict( + grant_type="client_credentials", + client_id=client_id or settings.client_id, + client_secret=client_secret or settings.client_secret, + ) + ), + ) + + response = lib.to_dict(result) + messages = error.parse_error_response(response, settings) + + if any(messages): + raise errors.ShippingSDKError(messages) + + expiry = datetime.datetime.now() + datetime.timedelta( + seconds=float(response.get("expires_in", 0)) + ) + + return {**response, "expiry": lib.fdatetime(expiry)} + + +class ConnectionConfig(lib.Enum): + shipping_options = lib.OptionEnum("shipping_options", list) + shipping_services = lib.OptionEnum("shipping_services", list) diff --git a/modules/connectors/usps_rest/karrio/schemas/usps_rest/__init__.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/modules/connectors/usps/karrio/schemas/usps/error_response.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/error_response.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/error_response.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/error_response.py diff --git a/modules/connectors/usps/karrio/schemas/usps/label_request.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/label_request.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/label_request.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/label_request.py diff --git a/modules/connectors/usps/karrio/schemas/usps/label_response.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/label_response.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/label_response.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/label_response.py diff --git a/modules/connectors/usps/karrio/schemas/usps/pickup_request.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_request.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/pickup_request.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_request.py diff --git a/modules/connectors/usps/karrio/schemas/usps/pickup_response.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_response.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/pickup_response.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_response.py diff --git a/modules/connectors/usps/karrio/schemas/usps/pickup_update_request.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_update_request.py similarity index 100% rename from
modules/connectors/usps/karrio/schemas/usps/pickup_update_request.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_update_request.py diff --git a/modules/connectors/usps/karrio/schemas/usps/pickup_update_response.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_update_response.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/pickup_update_response.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/pickup_update_response.py diff --git a/modules/connectors/usps/karrio/schemas/usps/rate_request.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/rate_request.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/rate_request.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/rate_request.py diff --git a/modules/connectors/usps/karrio/schemas/usps/rate_response.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/rate_response.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/rate_response.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/rate_response.py diff --git a/modules/connectors/usps/karrio/schemas/usps/scan_form_request.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/scan_form_request.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/scan_form_request.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/scan_form_request.py diff --git a/modules/connectors/usps/karrio/schemas/usps/scan_form_response.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/scan_form_response.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/scan_form_response.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/scan_form_response.py diff --git a/modules/connectors/usps/karrio/schemas/usps/tracking_response.py b/modules/connectors/usps_rest/karrio/schemas/usps_rest/tracking_response.py similarity index 100% rename from modules/connectors/usps/karrio/schemas/usps/tracking_response.py rename to modules/connectors/usps_rest/karrio/schemas/usps_rest/tracking_response.py diff --git a/modules/connectors/usps/schemas/error_response.json b/modules/connectors/usps_rest/schemas/error_response.json similarity index 100% rename from modules/connectors/usps/schemas/error_response.json rename to modules/connectors/usps_rest/schemas/error_response.json diff --git a/modules/connectors/usps/schemas/label_request.json b/modules/connectors/usps_rest/schemas/label_request.json similarity index 100% rename from modules/connectors/usps/schemas/label_request.json rename to modules/connectors/usps_rest/schemas/label_request.json diff --git a/modules/connectors/usps/schemas/label_response.json b/modules/connectors/usps_rest/schemas/label_response.json similarity index 100% rename from modules/connectors/usps/schemas/label_response.json rename to modules/connectors/usps_rest/schemas/label_response.json diff --git a/modules/connectors/usps/schemas/pickup_request.json b/modules/connectors/usps_rest/schemas/pickup_request.json similarity index 100% rename from modules/connectors/usps/schemas/pickup_request.json rename to modules/connectors/usps_rest/schemas/pickup_request.json diff --git a/modules/connectors/usps/schemas/pickup_response.json b/modules/connectors/usps_rest/schemas/pickup_response.json similarity index 100% rename from modules/connectors/usps/schemas/pickup_response.json rename to 
modules/connectors/usps_rest/schemas/pickup_response.json diff --git a/modules/connectors/usps/schemas/pickup_update_request.json b/modules/connectors/usps_rest/schemas/pickup_update_request.json similarity index 100% rename from modules/connectors/usps/schemas/pickup_update_request.json rename to modules/connectors/usps_rest/schemas/pickup_update_request.json diff --git a/modules/connectors/usps/schemas/pickup_update_response.json b/modules/connectors/usps_rest/schemas/pickup_update_response.json similarity index 100% rename from modules/connectors/usps/schemas/pickup_update_response.json rename to modules/connectors/usps_rest/schemas/pickup_update_response.json diff --git a/modules/connectors/usps/schemas/rate_request.json b/modules/connectors/usps_rest/schemas/rate_request.json similarity index 100% rename from modules/connectors/usps/schemas/rate_request.json rename to modules/connectors/usps_rest/schemas/rate_request.json diff --git a/modules/connectors/usps/schemas/rate_response.json b/modules/connectors/usps_rest/schemas/rate_response.json similarity index 100% rename from modules/connectors/usps/schemas/rate_response.json rename to modules/connectors/usps_rest/schemas/rate_response.json diff --git a/modules/connectors/usps/schemas/scan_form_request.json b/modules/connectors/usps_rest/schemas/scan_form_request.json similarity index 100% rename from modules/connectors/usps/schemas/scan_form_request.json rename to modules/connectors/usps_rest/schemas/scan_form_request.json diff --git a/modules/connectors/usps/schemas/scan_form_response.json b/modules/connectors/usps_rest/schemas/scan_form_response.json similarity index 100% rename from modules/connectors/usps/schemas/scan_form_response.json rename to modules/connectors/usps_rest/schemas/scan_form_response.json diff --git a/modules/connectors/usps/schemas/tracking_response.json b/modules/connectors/usps_rest/schemas/tracking_response.json similarity index 100% rename from modules/connectors/usps/schemas/tracking_response.json rename to modules/connectors/usps_rest/schemas/tracking_response.json diff --git a/modules/connectors/usps_rest/setup.py b/modules/connectors/usps_rest/setup.py new file mode 100644 index 0000000000..fb0fdd0d78 --- /dev/null +++ b/modules/connectors/usps_rest/setup.py @@ -0,0 +1,27 @@ +"""Warning: This setup.py is only there for git install until poetry support git subdirectory""" + +from setuptools import setup, find_namespace_packages + +with open("README.md", "r") as fh: + long_description = fh.read() + +setup( + name="karrio.usps_rest", + version="2024.6-rc22", + description="Karrio - USPS Shipping Extension", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/karrioapi/karrio", + author="karrio", + author_email="hello@karrio.io", + license="Apache-2.0", + packages=find_namespace_packages(exclude=["tests.*", "tests"]), + install_requires=["karrio"], + classifiers=[ + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + ], + zip_safe=False, + include_package_data=True, +) diff --git a/modules/connectors/usps_rest/tests/__init__.py b/modules/connectors/usps_rest/tests/__init__.py new file mode 100644 index 0000000000..b32d92a7dc --- /dev/null +++ b/modules/connectors/usps_rest/tests/__init__.py @@ -0,0 +1,5 @@ +from tests.usps_rest.test_rate import * +from tests.usps_rest.test_pickup import * +from tests.usps_rest.test_tracking import * +from tests.usps_rest.test_shipment import * +from 
tests.usps_rest.test_manifest import * diff --git a/modules/connectors/usps_rest/tests/usps_rest/__init__.py b/modules/connectors/usps_rest/tests/usps_rest/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/modules/connectors/usps_rest/tests/usps_rest/fixture.py b/modules/connectors/usps_rest/tests/usps_rest/fixture.py new file mode 100644 index 0000000000..a3bb38cf23 --- /dev/null +++ b/modules/connectors/usps_rest/tests/usps_rest/fixture.py @@ -0,0 +1,33 @@ +import karrio +import datetime +import karrio.lib as lib + +expiry = datetime.datetime.now() + datetime.timedelta(days=1) +client_id = "client_id" +client_secret = "client_secret" +cached_auth = { + f"usps_rest|{client_id}|{client_secret}": dict( + token_type="Bearer", + issued_at="1685542319575", + client_id=client_id, + access_token="access_token", + scope="addresses international-prices subscriptions payments pickup tracking labels scan-forms companies service-delivery-standards locations international-labels prices", + expires_in="14399", + refresh_count="0", + status="approved", + expiry=expiry.strftime("%Y-%m-%d %H:%M:%S"), + issuer="api.usps_rest.com", + application_name="Silver Shipper Developer", + api_products="[Shipping-Silver]", + public_key="LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4QWxwZjNSNEE1S0lwZnhJVWk1bgpMTFByZjZVZTV3MktzeGxSVzE1UWV0UzBjWGVxaW9OT2hXbDNaaVhEWEdKT3ZuK3RoY0NWVVQ3WC9JZWYvTENZCkhUWk1kYUJOdW55VHEwT2RNZmVkUU8zYUNKZmwvUnJPTHYyaG9TRDR4U1YxRzFuTTc1RTlRYitFZ1p0cmFEUXoKNW42SXRpMUMzOHFGMjU5NVRHUWVUemx3Wk1LQng1VTY2bGwzNzlkZ2plTUJxS3ppVHZHWEpOdVg5ZzRrRlBIaApTLzNERm9FNkVFSW8zUHExeDlXTnRaSm93VkRwQUVZZTQ3SU1UdXJDN2NGcXp2d3M1b1BDRHQ4c083N2lUdDN0Cm1vK3NrM2ExWnZSaGs2WUQ3Zkt1UldQVzFEYUM4dC9pazlnWnhqQndYNlZsSUhDRzRZSHlYejZteWdGV09jMmEKOVFJREFRQUIKLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0t", + ) +} + +gateway = karrio.gateway["usps_rest"].create( + dict( + client_id="client_id", + client_secret="client_secret", + account_number="Your Account Number", + ), + cache=lib.Cache(**cached_auth), +) diff --git a/modules/connectors/usps/tests/usps/test_manifest.py b/modules/connectors/usps_rest/tests/usps_rest/test_manifest.py similarity index 98% rename from modules/connectors/usps/tests/usps/test_manifest.py rename to modules/connectors/usps_rest/tests/usps_rest/test_manifest.py index 4ceaebafb9..29befa949b 100644 --- a/modules/connectors/usps/tests/usps/test_manifest.py +++ b/modules/connectors/usps_rest/tests/usps_rest/test_manifest.py @@ -19,7 +19,7 @@ def test_create_tracking_request(self): self.assertEqual(request.serialize(), ManifestRequest) def test_create_manifest(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Manifest.create(self.ManifestRequest).from_(gateway) @@ -29,7 +29,7 @@ def test_create_manifest(self): ) def test_parse_manifest_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: mock.return_value = ManifestResponse parsed_response = ( karrio.Manifest.create(self.ManifestRequest).from_(gateway).parse() @@ -59,8 +59,8 @@ def test_parse_manifest_response(self): ParsedManifestResponse = [ { - "carrier_id": "usps", - "carrier_name": "usps", + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", "doc": {"manifest": ANY}, "meta": {"manifestNumber": "string", "trackingNumbers": ["string"]}, }, diff --git 
a/modules/connectors/usps/tests/usps/test_pickup.py b/modules/connectors/usps_rest/tests/usps_rest/test_pickup.py similarity index 93% rename from modules/connectors/usps/tests/usps/test_pickup.py rename to modules/connectors/usps_rest/tests/usps_rest/test_pickup.py index 9c375f2da1..25cd5ebfbe 100644 --- a/modules/connectors/usps/tests/usps/test_pickup.py +++ b/modules/connectors/usps_rest/tests/usps_rest/test_pickup.py @@ -31,7 +31,7 @@ def test_create_cancel_pickup_request(self): self.assertEqual(request.serialize(), PickupCancelRequest) def test_create_pickup(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Pickup.schedule(self.PickupRequest).from_(gateway) @@ -41,7 +41,7 @@ def test_create_pickup(self): ) def test_update_pickup(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Pickup.update(self.PickupUpdateRequest).from_(gateway) @@ -51,7 +51,7 @@ def test_update_pickup(self): ) def test_cancel_shipment(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Pickup.cancel(self.PickupCancelRequest).from_(gateway) @@ -61,7 +61,7 @@ def test_cancel_shipment(self): ) def test_parse_pickup_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: mock.return_value = PickupResponse parsed_response = ( karrio.Pickup.schedule(self.PickupRequest).from_(gateway).parse() @@ -71,7 +71,7 @@ def test_parse_pickup_response(self): self.assertListEqual(lib.to_dict(parsed_response), ParsedPickupResponse) def test_parse_cancel_pickup_response(self): - with patch("karrio.mappers.usps.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: mock.return_value = PickupCancelResponse parsed_response = ( karrio.Pickup.cancel(self.PickupCancelRequest).from_(gateway).parse() @@ -129,8 +129,8 @@ def test_parse_cancel_pickup_response(self): ParsedPickupResponse = [ { - "carrier_id": "usps", - "carrier_name": "usps", + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", "confirmation_number": "string", "pickup_date": "2019-08-24", }, @@ -139,8 +139,8 @@ def test_parse_cancel_pickup_response(self): ParsedCancelPickupResponse = [ { - "carrier_id": "usps", - "carrier_name": "usps", + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", "operation": "Cancel Pickup", "success": True, }, diff --git a/modules/connectors/usps_rest/tests/usps_rest/test_rate.py b/modules/connectors/usps_rest/tests/usps_rest/test_rate.py new file mode 100644 index 0000000000..20ade93eaf --- /dev/null +++ b/modules/connectors/usps_rest/tests/usps_rest/test_rate.py @@ -0,0 +1,168 @@ +import unittest +from unittest.mock import patch, ANY +from .fixture import gateway +from tests import logger + +import karrio +import karrio.lib as lib +import karrio.core.models as models + + +class TestUSPSRating(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.RateRequest = models.RateRequest(**RatePayload) + + def test_create_rate_request(self): + request = gateway.mapper.create_rate_request(self.RateRequest) + logger.debug(request.serialize()) + self.assertEqual(request.serialize(), RateRequest) + + def test_get_rate(self): + with 
patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = "{}" + karrio.Rating.fetch(self.RateRequest).from_(gateway) + + self.assertEqual( + mock.call_args[1]["url"], + f"{gateway.settings.server_url}/v3/total-rates/search", + ) + + def test_parse_rate_response(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = RateResponse + parsed_response = ( + karrio.Rating.fetch(self.RateRequest).from_(gateway).parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual(lib.to_dict(parsed_response), ParsedRateResponse) + + +if __name__ == "__main__": + unittest.main() + + +RatePayload = { + "shipper": { + "company_name": "ABC Corp.", + "address_line1": "1098 N Fraser Street", + "city": "Georgetown", + "postal_code": "29440", + "country_code": "US", + "person_name": "Tall Tom", + "phone_number": "8005554526", + "state_code": "SC", + }, + "recipient": { + "company_name": "Horizon", + "address_line1": "1309 S Agnew Avenue", + "address_line2": "Apt 303", + "city": "Oklahoma City", + "postal_code": "73108", + "country_code": "US", + "person_name": "Lina Smith", + "phone_number": "+1 123 456 7890", + "state_code": "OK", + }, + "parcels": [ + { + "height": 50, + "length": 50, + "weight": 20, + "width": 12, + "dimension_unit": "CM", + "weight_unit": "KG", + } + ], + "options": { + "usps_label_delivery_service": True, + "usps_price_type": "RETAIL", + "shipment_date": "2024-07-28", + }, + "services": ["usps_parcel_select"], + "reference": "REF-001", +} + +ParsedRateResponse = [ + [ + { + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", + "currency": "USD", + "extra_charges": [ + {"amount": 3.35, "currency": "USD", "name": "Base Charge"}, + {"amount": 3.35, "currency": "USD", "name": "string"}, + {"amount": 3.35, "currency": "USD", "name": "Adult Signature Required"}, + ], + "meta": {"service_name": "usps_parcel_select", "zone": "01"}, + "service": "usps_parcel_select", + "total_charge": 3.35, + } + ], + [], +] + + +RateRequest = [ + { + "accountNumber": "Your Account Number", + "accountType": "EPS", + "destinationZIPCode": "73108", + "extraServices": [415], + "height": 19.69, + "length": 19.69, + "mailClasses": ["PARCEL_SELECT"], + "mailingDate": "2024-07-28", + "originZIPCode": "29440", + "priceType": "RETAIL", + "weight": 44.1, + "width": 4.72, + } +] + + +RateResponse = """{ + "rateOptions": [ + { + "totalBasePrice": 3.35, + "rates": [ + { + "SKU": "DPXX0XXXXX07200", + "description": "string", + "priceType": "RETAIL", + "price": 3.35, + "weight": 5, + "dimWeight": 5, + "fees": [ + { + "name": "string", + "SKU": "string", + "price": 0 + } + ], + "startDate": "2021-07-16", + "endDate": "2021-07-16", + "mailClass": "PARCEL_SELECT", + "zone": "01" + } + ], + "extraServices": [ + { + "extraService": "922", + "name": "Adult Signature Required", + "SKU": "DPXX0XXXXX07200", + "priceType": "RETAIL", + "price": 3.35, + "warnings": [ + { + "warningCode": "string", + "warningDescription": "string" + } + ] + } + ], + "totalPrice": 3.35 + } + ] +} +""" diff --git a/modules/connectors/usps_rest/tests/usps_rest/test_shipment.py b/modules/connectors/usps_rest/tests/usps_rest/test_shipment.py new file mode 100644 index 0000000000..efd48a4aa5 --- /dev/null +++ b/modules/connectors/usps_rest/tests/usps_rest/test_shipment.py @@ -0,0 +1,328 @@ +import unittest +from unittest.mock import patch, ANY +from .fixture import gateway +from tests import logger + +import karrio +import karrio.lib as lib +import karrio.core.models as models 
+ + +class TestUSPSShipping(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.ShipmentRequest = models.ShipmentRequest(**ShipmentPayload) + self.ShipmentCancelRequest = models.ShipmentCancelRequest( + **ShipmentCancelPayload + ) + + def test_create_shipment_request(self): + request = gateway.mapper.create_shipment_request(self.ShipmentRequest) + logger.debug(request.serialize()) + self.assertEqual(request.serialize(), ShipmentRequest) + + def test_create_cancel_shipment_request(self): + request = gateway.mapper.create_cancel_shipment_request( + self.ShipmentCancelRequest + ) + logger.debug(request.serialize()) + self.assertEqual(request.serialize(), ShipmentCancelRequest) + + def test_create_shipment(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = "{}" + karrio.Shipment.create(self.ShipmentRequest).from_(gateway) + + self.assertEqual( + mock.call_args[1]["url"], + f"{gateway.settings.server_url}/v3/label", + ) + + def test_cancel_shipment(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = "{}" + karrio.Shipment.cancel(self.ShipmentCancelRequest).from_(gateway) + + self.assertEqual( + mock.call_args[1]["url"], + f"{gateway.settings.server_url}/v3/label/794947717776", + ) + + def test_parse_shipment_response(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = ShipmentResponse + parsed_response = ( + karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual(lib.to_dict(parsed_response), ParsedShipmentResponse) + + def test_parse_cancel_shipment_response(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = ShipmentCancelResponse + parsed_response = ( + karrio.Shipment.cancel(self.ShipmentCancelRequest) + .from_(gateway) + .parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual( + lib.to_dict(parsed_response), ParsedCancelShipmentResponse + ) + + +if __name__ == "__main__": + unittest.main() + + +ShipmentPayload = { + "shipper": { + "company_name": "ABC Corp.", + "address_line1": "1098 N Fraser Street", + "city": "Georgetown", + "postal_code": "29440", + "country_code": "US", + "person_name": "Tall Tom", + "phone_number": "8005554526", + "state_code": "SC", + }, + "recipient": { + "company_name": "Horizon", + "address_line1": "1309 S Agnew Avenue", + "address_line2": "Apt 303", + "city": "Oklahoma City", + "postal_code": "73108", + "country_code": "US", + "person_name": "Lina Smith", + "phone_number": "+1 123 456 7890", + "state_code": "OK", + }, + "parcels": [ + { + "height": 50, + "length": 50, + "weight": 20, + "width": 12, + "dimension_unit": "CM", + "weight_unit": "KG", + } + ], + "service": "carrier_service", + "options": { + "signature_required": True, + "shipment_date": "2024-07-28", + }, + "reference": "#Order 11111", +} + +ShipmentCancelPayload = { + "shipment_identifier": "794947717776", +} + +ParsedShipmentResponse = [ + { + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", + "docs": {"invoice": ANY, "label": ANY}, + "label_type": "PDF", + "meta": { + "SKU": "string", + "labelBrokerID": "string", + "postage": 0, + "routingInformation": "string", + }, + "shipment_identifier": "string", + "tracking_number": "string", + }, + [], +] + +ParsedCancelShipmentResponse = [ + { + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", + "operation": "Cancel Shipment", + 
"success": True, + }, + [], +] + + +ShipmentRequest = [ + { + "fromAddress": { + "ZIPPlus4": "29440", + "city": "Georgetown", + "firm": "ABC Corp.", + "firstName": "Tall Tom", + "ignoreBadAddress": True, + "phone": "8005554526", + "streetAddress": "1098 N Fraser Street", + }, + "imageInfo": { + "imageType": "PDF", + "labelType": "4X6LABEL", + "receiptOption": "SEPARATE_PAGE", + }, + "packageDescription": { + "customerReference": [ + {"printReferenceNumber": True, "referenceNumber": "#Order 11111"} + ], + "destinationEntryFacilityType": "NONE", + "dimensionsUOM": "in", + "girth": 124.0, + "height": 19.69, + "inductionZIPCode": "29440", + "length": 19.69, + "mailClass": "carrier_service", + "mailingDate": "2024-07-28", + "processingCategory": "NON_MACHINABLE", + "rateIndicator": "SP", + "weight": 44.1, + "weightUOM": "lb", + "width": 4.72, + }, + "senderAddress": { + "ZIPPlus4": "29440", + "city": "Georgetown", + "firm": "ABC Corp.", + "firstName": "Tall Tom", + "ignoreBadAddress": True, + "phone": "8005554526", + "streetAddress": "1098 N Fraser Street", + }, + "toAddress": { + "ZIPCode": "73108", + "city": "Oklahoma City", + "firm": "Horizon", + "firstName": "Lina Smith", + "ignoreBadAddress": True, + "phone": "+1 123 456 7890", + "secondaryAddress": "Apt 303", + "streetAddress": "1309 S Agnew Avenue", + }, + } +] + +ShipmentCancelRequest = [{"trackingNumber": "794947717776"}] + +ShipmentResponse = """{ + "labelMetadata": { + "labelAddress": { + "streetAddress": "string", + "streetAddressAbbreviation": "string", + "secondaryAddress": "string", + "cityAbbreviation": "string", + "city": "string", + "state": "st", + "ZIPCode": "string", + "ZIPPlus4": "string", + "urbanization": "string", + "firstName": "string", + "lastName": "string", + "firm": "string", + "phone": "string", + "email": "user@example.com", + "ignoreBadAddress": true + }, + "routingInformation": "string", + "trackingNumber": "string", + "constructCode": "string", + "SKU": "string", + "postage": 0, + "extraServices": [ + { + "name": "string", + "SKU": "string", + "price": 0 + } + ], + "zone": "string", + "commitment": { + "name": "string", + "scheduleDeliveryDate": "string" + }, + "weightUOM": "string", + "weight": 0, + "dimensionalWeight": 0, + "fees": [ + { + "name": "string", + "SKU": "string", + "price": 0 + } + ], + "permitHolderName": "string", + "inductionType": {}, + "labelBrokerID": "string", + "links": [ + { + "rel": ["string"], + "title": "string", + "href": "http://example.com", + "method": "GET", + "submissionMediaType": "string", + "targetMediaType": "string" + } + ] + }, + "returnLabelMetadata": { + "labelAddress": { + "streetAddress": "string", + "streetAddressAbbreviation": "string", + "secondaryAddress": "string", + "cityAbbreviation": "string", + "city": "string", + "state": "st", + "ZIPCode": "string", + "ZIPPlus4": "string", + "urbanization": "string", + "firstName": "string", + "lastName": "string", + "firm": "string", + "phone": "string", + "email": "user@example.com", + "ignoreBadAddress": true + }, + "routingInformation": "string", + "trackingNumber": "string", + "SKU": "string", + "postage": 0, + "extraServices": [ + { + "name": "string", + "SKU": "string", + "price": 0 + } + ], + "zone": "string", + "weightUOM": "string", + "weight": 0, + "dimensionalWeight": 0, + "fees": [ + { + "name": "string", + "SKU": "string", + "price": 0 + } + ], + "labelBrokerID": "string", + "links": [ + { + "rel": ["string"], + "title": "string", + "href": "http://example.com", + "method": "GET", + "submissionMediaType": 
"string", + "targetMediaType": "string" + } + ] + }, + "labelImage": "string", + "receiptImage": "string", + "returnLabelImage": "string", + "returnReceiptImage": "string" +} +""" + +ShipmentCancelResponse = """{"ok": true}""" diff --git a/modules/connectors/usps_rest/tests/usps_rest/test_tracking.py b/modules/connectors/usps_rest/tests/usps_rest/test_tracking.py new file mode 100644 index 0000000000..98fc382a22 --- /dev/null +++ b/modules/connectors/usps_rest/tests/usps_rest/test_tracking.py @@ -0,0 +1,225 @@ +import unittest +from unittest.mock import patch, ANY +from .fixture import gateway +from tests import logger + +import karrio +import karrio.lib as lib +import karrio.core.models as models + + +class TestUSPSTracking(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.TrackingRequest = models.TrackingRequest(**TrackingPayload) + + def test_create_tracking_request(self): + request = gateway.mapper.create_tracking_request(self.TrackingRequest) + logger.debug(request.serialize()) + self.assertEqual(request.serialize(), TrackingRequest) + + def test_get_tracking(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = "{}" + karrio.Tracking.fetch(self.TrackingRequest).from_(gateway) + + self.assertEqual( + mock.call_args[1]["url"], + f"{gateway.settings.server_url}/v3/tracking/89108749065090", + ) + + def test_parse_tracking_response(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = TrackingResponse + parsed_response = ( + karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual(lib.to_dict(parsed_response), ParsedTrackingResponse) + + def test_parse_error_response(self): + with patch("karrio.mappers.usps_rest.proxy.lib.request") as mock: + mock.return_value = ErrorResponse + parsed_response = ( + karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual(lib.to_dict(parsed_response), ParsedErrorResponse) + + +if __name__ == "__main__": + unittest.main() + + +TrackingPayload = { + "tracking_numbers": ["89108749065090"], +} + +ParsedTrackingResponse = [ + [ + { + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", + "delivered": False, + "estimated_delivery": "2019-08-24", + "events": [ + { + "code": "string", + "date": "2019-08-24", + "description": "string", + "location": "string, string, string, string", + "time": "14:15 PM", + } + ], + "info": { + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=string", + "expected_delivery": "2019-08-24", + "shipment_destination_country": "string", + "shipment_destination_postal_code": "string", + "shipment_origin_country": "st", + "shipment_origin_postal_code": "strin", + "shipment_service": "string", + }, + "status": "in_transit", + "tracking_number": "string", + } + ], + [], +] + +ParsedErrorResponse = [ + [], + [ + { + "carrier_id": "usps_rest", + "carrier_name": "usps_rest", + "code": "string", + "details": { + "errors": [ + { + "code": "string", + "detail": "string", + "source": {"example": "string", "parameter": "string"}, + "status": "string", + "title": "string", + } + ], + "tracking_number": "89108749065090", + }, + "message": "string", + } + ], +] + + +TrackingRequest = ["89108749065090"] + +TrackingResponse = """{ + "trackingNumber": "string", + "additionalInfo": "string", + "ADPScripting": "string", + "archiveRestoreInfo": "string", + 
"associatedLabel": "string", + "carrierRelease": true, + "mailClass": "BOUND_PRINTED_MATTER", + "destinationCity": "string", + "destinationCountryCode": "string", + "destinationState": "st", + "destinationZIP": "string", + "editedLabelId": "string", + "emailEnabled": true, + "endOfDay": "string", + "eSOFEligible": true, + "expectedDeliveryTimeStamp": "2019-08-24T14:15:22Z", + "expectedDeliveryType": "string", + "guaranteedDeliveryTimeStamp": "2019-08-24T14:15:22Z", + "guaranteedDetails": "string", + "itemShape": "LETTER", + "kahalaIndicator": true, + "mailType": "INTERNATIONAL_INBOUND", + "approximateIntakeDate": "string", + "uniqueTrackingId": "string", + "onTime": true, + "originCity": "string", + "originCountry": "st", + "originState": "str", + "originZIP": "strin", + "proofOfDeliveryEnabled": true, + "predictedDeliveryTimeStamp": "2019-08-24T14:15:22Z", + "predictedDeliveryDate": "2019-08-24", + "predictedDeliveryWindowStartTime": "string", + "predictedDeliveryWindowEndTime": "string", + "relatedReturnReceiptID": "string", + "redeliveryEnabled": true, + "enabledNotificationRequests": { + "SMS": { + "futureDelivery": true, + "alertDelivery": true, + "todayDelivery": true, + "UP": true, + "DND": true + }, + "EMail": { + "futureDelivery": true, + "alertDelivery": true, + "todayDelivery": true, + "UP": true, + "DND": true, + "firstDisplayable": true, + "otherActivity": true + } + }, + "restoreEnabled": true, + "returnDateNotice": "2019-08-24", + "RRAMenabled": true, + "RREEnabled": true, + "services": ["string"], + "serviceTypeCode": "string", + "status": "string", + "statusCategory": "string", + "statusSummary": "Your item was delivered at 12:55 pm on April 05, 2010 in FALMOUTH, MA 02540", + "trackingProofOfDeliveryEnabled": true, + "valueofArticle": "string", + "extendRetentionPurchasedCode": "string", + "extendRetentionExtraServiceCodeOptions": [{}], + "trackingEvents": [ + { + "eventType": "string", + "eventTimestamp": "2019-08-24T14:15:22Z", + "GMTTimestamp": "2024-04-04T14:03:12.041Z", + "GMTOffset": "-7:00", + "eventCountry": "string", + "eventCity": "string", + "eventState": "string", + "eventZIP": "string", + "firm": "string", + "name": "string", + "authorizedAgent": true, + "eventCode": "string", + "actionCode": "string", + "reasonCode": "string" + } + ] +} +""" + +ErrorResponse = """{ + "apiVersion": "string", + "error": { + "code": "string", + "message": "string", + "errors": [ + { + "status": "string", + "code": "string", + "title": "string", + "detail": "string", + "source": { + "parameter": "string", + "example": "string" + } + } + ] + } +} +""" diff --git a/modules/connectors/usps_rest_international/README.md b/modules/connectors/usps_rest_international/README.md new file mode 100644 index 0000000000..4617ab659f --- /dev/null +++ b/modules/connectors/usps_rest_international/README.md @@ -0,0 +1,31 @@ + +# karrio.usps_rest_international + +This package is a USPS International extension of the [karrio](https://pypi.org/project/karrio) multi carrier shipping SDK. + +## Requirements + +`Python 3.7+` + +## Installation + +```bash +pip install karrio.usps_rest_international +``` + +## Usage + +```python +import karrio +from karrio.mappers.usps_rest_international.settings import Settings + + +# Initialize a carrier gateway +usps_rest_international = karrio.gateway["usps_rest_international"].create( + Settings( + ... 
+ ) +) +``` + +Check the [Karrio Multi-carrier SDK docs](https://docs.karrio.io) for Shipping API requests diff --git a/modules/connectors/usps_rest_international/generate b/modules/connectors/usps_rest_international/generate new file mode 100755 index 0000000000..78544358ce --- /dev/null +++ b/modules/connectors/usps_rest_international/generate @@ -0,0 +1,24 @@ +SCHEMAS=./schemas +LIB_MODULES=./karrio/schemas/usps_rest_international +find "${LIB_MODULES}" -name "*.py" -exec rm -r {} \; +touch "${LIB_MODULES}/__init__.py" + +quicktype() { + echo "Generating $1..." + docker run -it --rm --name quicktype -v $PWD:/app -e SCHEMAS=/app/schemas -e LIB_MODULES=/app/karrio/schemas/usps_rest_international \ + karrio/tools /quicktype/script/quicktype --no-uuids --no-date-times --no-enums --src-lang json --lang jstruct \ + --no-nice-property-names --all-properties-optional --type-as-suffix $@ +} + +quicktype --src="${SCHEMAS}/error_response.json" --out="${LIB_MODULES}/error_response.py" +quicktype --src="${SCHEMAS}/label_request.json" --out="${LIB_MODULES}/label_request.py" +quicktype --src="${SCHEMAS}/label_response.json" --out="${LIB_MODULES}/label_response.py" +quicktype --src="${SCHEMAS}/pickup_request.json" --out="${LIB_MODULES}/pickup_request.py" +quicktype --src="${SCHEMAS}/pickup_response.json" --out="${LIB_MODULES}/pickup_response.py" +quicktype --src="${SCHEMAS}/pickup_update_request.json" --out="${LIB_MODULES}/pickup_update_request.py" +quicktype --src="${SCHEMAS}/pickup_update_response.json" --out="${LIB_MODULES}/pickup_update_response.py" +quicktype --src="${SCHEMAS}/rate_request.json" --out="${LIB_MODULES}/rate_request.py" +quicktype --src="${SCHEMAS}/rate_response.json" --out="${LIB_MODULES}/rate_response.py" +quicktype --src="${SCHEMAS}/scan_form_request.json" --out="${LIB_MODULES}/scan_form_request.py" +quicktype --src="${SCHEMAS}/scan_form_response.json" --out="${LIB_MODULES}/scan_form_response.py" +quicktype --src="${SCHEMAS}/tracking_response.json" --out="${LIB_MODULES}/tracking_response.py" diff --git a/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/__init__.py b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/__init__.py new file mode 100644 index 0000000000..d91670daba --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/__init__.py @@ -0,0 +1,22 @@ +from karrio.core.metadata import Metadata + +from karrio.mappers.usps_rest_international.mapper import Mapper +from karrio.mappers.usps_rest_international.proxy import Proxy +from karrio.mappers.usps_rest_international.settings import Settings +import karrio.providers.usps_rest_international.units as units +import karrio.providers.usps_rest_international.utils as utils + + +METADATA = Metadata( + id="usps_rest_international", + label="USPS International", + # Integrations + Mapper=Mapper, + Proxy=Proxy, + Settings=Settings, + # Data Units + is_hub=False, + options=units.ShippingOption, + services=units.ShippingService, + connection_configs=utils.ConnectionConfig, +) diff --git a/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/mapper.py b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/mapper.py new file mode 100644 index 0000000000..29ff2ae57c --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/mapper.py @@ -0,0 +1,88 @@ +"""Karrio USPS client mapper.""" + +import typing +import karrio.lib as lib +import 
karrio.api.mapper as mapper +import karrio.core.models as models +import karrio.providers.usps_rest_international as provider +import karrio.mappers.usps_rest_international.settings as provider_settings + + +class Mapper(mapper.Mapper): + settings: provider_settings.Settings + + def create_rate_request(self, payload: models.RateRequest) -> lib.Serializable: + return provider.rate_request(payload, self.settings) + + def create_tracking_request( + self, payload: models.TrackingRequest + ) -> lib.Serializable: + return provider.tracking_request(payload, self.settings) + + def create_shipment_request( + self, payload: models.ShipmentRequest + ) -> lib.Serializable: + return provider.shipment_request(payload, self.settings) + + def create_pickup_request(self, payload: models.PickupRequest) -> lib.Serializable: + return provider.pickup_request(payload, self.settings) + + def create_pickup_update_request( + self, payload: models.PickupUpdateRequest + ) -> lib.Serializable: + return provider.pickup_update_request(payload, self.settings) + + def create_cancel_pickup_request( + self, payload: models.PickupCancelRequest + ) -> lib.Serializable: + return provider.pickup_cancel_request(payload, self.settings) + + def create_cancel_shipment_request( + self, payload: models.ShipmentCancelRequest + ) -> lib.Serializable[str]: + return provider.shipment_cancel_request(payload, self.settings) + + def create_manifest_request( + self, payload: models.ManifestRequest + ) -> lib.Serializable: + return provider.manifest_request(payload, self.settings) + + def parse_cancel_pickup_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + return provider.parse_pickup_cancel_response(response, self.settings) + + def parse_cancel_shipment_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + return provider.parse_shipment_cancel_response(response, self.settings) + + def parse_pickup_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: + return provider.parse_pickup_response(response, self.settings) + + def parse_pickup_update_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.PickupDetails, typing.List[models.Message]]: + return provider.parse_pickup_update_response(response, self.settings) + + def parse_rate_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + return provider.parse_rate_response(response, self.settings) + + def parse_shipment_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ShipmentDetails, typing.List[models.Message]]: + return provider.parse_shipment_response(response, self.settings) + + def parse_tracking_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: + return provider.parse_tracking_response(response, self.settings) + + def parse_manifest_response( + self, response: lib.Deserializable[str] + ) -> typing.Tuple[models.ManifestDetails, typing.List[models.Message]]: + return provider.parse_manifest_response(response, self.settings) diff --git a/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/proxy.py b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/proxy.py new file mode 100644 index 
0000000000..6289cbcc53 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/proxy.py @@ -0,0 +1,151 @@ +"""Karrio USPS client proxy.""" + +import karrio.lib as lib +import karrio.api.proxy as proxy +import karrio.mappers.usps_rest_international.settings as provider_settings + + +class Proxy(proxy.Proxy): + settings: provider_settings.Settings + + def get_rates(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda _: lib.request( + url=f"{self.settings.server_url}/v3/total-rates/search", + data=lib.to_json(_), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ), + request.serialize(), + ) + + return lib.Deserializable(response, lambda _: [lib.to_dict(_) for _ in _]) + + def create_shipment(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda _: lib.request( + url=f"{self.settings.server_url}/v3/international-label", + data=lib.to_json(_), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ), + request.serialize(), + ) + + return lib.Deserializable( + response, + lambda _: [lib.to_dict(_) for _ in _], + request.ctx, + ) + + def cancel_shipment(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda _: ( + _["trackingNumber"], + lib.request( + url=f"{self.settings.server_url}/v3/international-label/{_['trackingNumber']}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + on_ok=lambda _: '{"ok": true}', + ), + ), + request.serialize(), + ) + + return lib.Deserializable( + response, + lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], + ) + + def get_tracking(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.run_asynchronously( + lambda trackingNumber: ( + trackingNumber, + lib.request( + url=f"{self.settings.server_url}/v3/tracking/{trackingNumber}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ), + ), + request.serialize(), + ) + + return lib.Deserializable( + response, + lambda __: [(_[0], lib.to_dict(_[1])) for _ in __], + ) + + def schedule_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/carrier-pickup", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ) + + return lib.Deserializable(response, lib.to_dict) + + def modify_pickup(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/carrier-pickup/{request.ctx['confirmationNumber']}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ) + + return lib.Deserializable(response, lib.to_dict) + + def cancel_pickup(self, 
request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/carrier-pickup/{request.serialize()['confirmationNumber']}", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + on_ok=lambda _: '{"ok": true}', + ) + + return lib.Deserializable(response, lib.to_dict) + + def create_manifest(self, request: lib.Serializable) -> lib.Deserializable[str]: + response = lib.request( + url=f"{self.settings.server_url}/v3/scan-form", + data=lib.to_json(request.serialize()), + trace=self.trace_as("json"), + method="POST", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self.settings.access_token}", + }, + ) + + return lib.Deserializable(response, lib.to_dict) diff --git a/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/settings.py b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/settings.py new file mode 100644 index 0000000000..0857081948 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/mappers/usps_rest_international/settings.py @@ -0,0 +1,23 @@ +"""Karrio USPS client settings.""" + +import attr +import karrio.providers.usps_rest_international.utils as provider_utils + + +@attr.s(auto_attribs=True) +class Settings(provider_utils.Settings): + """USPS connection settings.""" + + # Add carrier specific API connection properties here + client_id: str + client_secret: str + account_type: str = None + account_number: str = None + + # generic properties + id: str = None + test_mode: bool = False + carrier_id: str = "usps_rest_international" + account_country_code: str = "US" + metadata: dict = {} + config: dict = {} diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/__init__.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/__init__.py new file mode 100644 index 0000000000..420711b15f --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/__init__.py @@ -0,0 +1,29 @@ +"""Karrio USPS provider imports.""" + +from karrio.providers.usps_rest_international.utils import Settings +from karrio.providers.usps_rest_international.rate import ( + parse_rate_response, + rate_request, +) +from karrio.providers.usps_rest_international.shipment import ( + parse_shipment_cancel_response, + parse_shipment_response, + shipment_cancel_request, + shipment_request, +) +from karrio.providers.usps_rest_international.pickup import ( + parse_pickup_cancel_response, + parse_pickup_update_response, + parse_pickup_response, + pickup_update_request, + pickup_cancel_request, + pickup_request, +) +from karrio.providers.usps_rest_international.tracking import ( + parse_tracking_response, + tracking_request, +) +from karrio.providers.usps_rest_international.manifest import ( + parse_manifest_response, + manifest_request, +) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/error.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/error.py new file mode 100644 index 0000000000..929e9e34f0 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/error.py @@ -0,0 +1,26 @@ +"""Karrio USPS error parser.""" + +import typing +import karrio.lib as lib +import 
karrio.core.models as models +import karrio.providers.usps_rest_international.utils as provider_utils + + +def parse_error_response( + response: typing.Union[dict, typing.List[dict]], + settings: provider_utils.Settings, + **kwargs, +) -> typing.List[models.Message]: + responses = response if isinstance(response, list) else [response] + errors: list = [response["error"] for response in responses if "error" in response] + + return [ + models.Message( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + code=error.get("code"), + message=error.get("message"), + details={**kwargs, "errors": error.get("errors", [])}, + ) + for error in errors + ] diff --git a/modules/connectors/usps/karrio/providers/usps/manifest.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/manifest.py similarity index 90% rename from modules/connectors/usps/karrio/providers/usps/manifest.py rename to modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/manifest.py index 3dab7b07e0..fb6e16228c 100644 --- a/modules/connectors/usps/karrio/providers/usps/manifest.py +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/manifest.py @@ -1,15 +1,15 @@ """Karrio USPS manifest API implementation.""" -import karrio.schemas.usps.scan_form_request as usps -import karrio.schemas.usps.scan_form_response as manifest +import karrio.schemas.usps_rest_international.scan_form_request as usps +import karrio.schemas.usps_rest_international.scan_form_response as manifest import time import typing import karrio.lib as lib import karrio.core.models as models -import karrio.providers.usps.error as error -import karrio.providers.usps.utils as provider_utils -import karrio.providers.usps.units as provider_units +import karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units def parse_manifest_response( @@ -65,7 +65,6 @@ def manifest_request( # fmt: on ), ) - print(options.items()) # map data to convert karrio model to usps specific type request = usps.ScanFormRequestType( diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/__init__.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/__init__.py new file mode 100644 index 0000000000..2d943f38d8 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/__init__.py @@ -0,0 +1,12 @@ +from karrio.providers.usps_rest_international.pickup.create import ( + parse_pickup_response, + pickup_request, +) +from karrio.providers.usps_rest_international.pickup.update import ( + parse_pickup_update_response, + pickup_update_request, +) +from karrio.providers.usps_rest_international.pickup.cancel import ( + parse_pickup_cancel_response, + pickup_cancel_request, +) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/cancel.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/cancel.py new file mode 100644 index 0000000000..238d682a5a --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/cancel.py @@ -0,0 +1,40 @@ +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import 
karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units + + +def parse_pickup_cancel_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + response = _response.deserialize() + messages = error.parse_error_response(response, settings) + success = response.get("ok") == True + + confirmation = ( + models.ConfirmationDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + operation="Cancel Pickup", + success=success, + ) + if success + else None + ) + + return confirmation, messages + + +def pickup_cancel_request( + payload: models.PickupCancelRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + + # map data to convert karrio model to usps specific type + request = dict(confirmationNumber=payload.confirmation_number) + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/create.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/create.py new file mode 100644 index 0000000000..7d1bba3f79 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/create.py @@ -0,0 +1,102 @@ +"""Karrio USPS schedule pickup implementation.""" + +import karrio.schemas.usps_rest_international.pickup_request as usps +import karrio.schemas.usps_rest_international.pickup_response as pickup + +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units + + +def parse_pickup_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + response = _response.deserialize() + + messages = error.parse_error_response(response, settings) + pickup = ( + _extract_details(response, settings) + if "confirmationNumber" in response + else None + ) + + return pickup, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.PickupDetails: + details = lib.to_object(pickup.PickupResponseType, data) + + return models.PickupDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + confirmation_number=details.confirmationNumber, + pickup_date=lib.fdate(details.pickupDate), + ) + + +def pickup_request( + payload: models.PickupRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + address = lib.to_address(payload.address) + packages = lib.to_packages(payload.parcels) + options = lib.units.Options( + payload.options, + option_type=lib.units.create_enum( + "PickupOptions", + # fmt: off + { + "usps_package_type": lib.OptionEnum("usps_package_type"), + }, + # fmt: on + ), + ) + + # map data to convert karrio model to usps specific type + request = usps.PickupRequestType( + pickupDate=lib.fdate(payload.pickup_date), + pickupAddress=usps.PickupAddressType( + firstName=address.person_name, + lastName=None, + firm=address.company_name, + address=usps.AddressType( + streetAddress=address.address_line1, + 
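+                    # remaining fields map from the same Karrio address; the 5-digit ZIP and +4 suffix are split out below via lib.to_zip5 / lib.to_zip4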
secondaryAddress=address.address_line2, + city=address.city, + state=address.state, + ZIPCode=lib.to_zip5(address.postal_code), + ZIPPlus4=lib.to_zip4(address.postal_code) or "", + urbanization=None, + ), + contact=[ + usps.ContactType(email=address.email) + for _ in [address.email] + if _ is not None + ], + ), + packages=[ + usps.PackageType( + packageType=options.usps_package_type.state or "OTHER", + packageCount=len(packages), + ) + ], + estimatedWeight=packages.weight.LB, + pickupLocation=lib.identity( + usps.PickupLocationType( + packageLocation=payload.package_location, + specialInstructions=payload.instruction, + ) + if any([payload.package_location, payload.instruction]) + else None + ), + ) + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/update.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/update.py new file mode 100644 index 0000000000..271a607a3b --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/pickup/update.py @@ -0,0 +1,109 @@ +"""Karrio USPS update pickup implementation.""" + +import karrio.schemas.usps_rest_international.pickup_update_request as usps +import karrio.schemas.usps_rest_international.pickup_update_response as pickup + +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units + + +def parse_pickup_update_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + response = _response.deserialize() + + messages = error.parse_error_response(response, settings) + pickup = ( + _extract_details(response, settings) + if "confirmationNumber" in response + else None + ) + + return pickup, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.PickupDetails: + details = lib.to_object(pickup.PickupUpdateResponseType, data) + + return models.PickupDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + confirmation_number=details.confirmationNumber, + pickup_date=lib.fdate(details.pickupDate), + ) + + +def pickup_update_request( + payload: models.PickupUpdateRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + address = lib.to_address(payload.address) + packages = lib.to_packages(payload.parcels) + options = lib.units.Options( + payload.options, + option_type=lib.units.create_enum( + "PickupOptions", + # fmt: off + { + "usps_package_type": lib.OptionEnum("usps_package_type"), + }, + # fmt: on + ), + ) + + # map data to convert karrio model to usps specific type + request = usps.PickupUpdateRequestType( + pickupDate=lib.fdate(payload.pickup_date), + carrierPickupRequest=usps.CarrierPickupRequestType( + pickupDate=lib.fdate(payload.pickup_date), + pickupAddress=usps.PickupAddressType( + firstName=address.person_name, + lastName=None, + firm=address.company_name, + address=usps.AddressType( + streetAddress=address.address_line1, + secondaryAddress=address.address_line2, + city=address.city, + state=address.state, + ZIPCode=lib.to_zip5(address.postal_code), + ZIPPlus4=lib.to_zip4(address.postal_code) or "", + 
urbanization=None, + ), + contact=[ + usps.ContactType(email=address.email) + for _ in [address.email] + if _ is not None + ], + ), + packages=[ + usps.PackageType( + packageType=options.usps_package_type.state or "OTHER", + packageCount=len(packages), + ) + ], + estimatedWeight=packages.weight.LB, + pickupLocation=lib.identity( + usps.PickupLocationType( + packageLocation=payload.package_location, + specialInstructions=payload.instruction, + ) + if any([payload.package_location, payload.instruction]) + else None + ), + ), + ) + + return lib.Serializable( + request, + lib.to_dict, + dict(confirmationNumber=payload.confirmation_number), + ) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/rate.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/rate.py new file mode 100644 index 0000000000..76a6031cd6 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/rate.py @@ -0,0 +1,122 @@ +"""Karrio USPS rating API implementation.""" + +import karrio.schemas.usps_rest_international.rate_request as usps +import karrio.schemas.usps_rest_international.rate_response as rating + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.core.errors as errors +import karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units + + +def parse_rate_response( + _response: lib.Deserializable[dict], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + responses = _response.deserialize() + + messages = error.parse_error_response(responses, settings) + rates = lib.to_multi_piece_rates( + [ + ( + f"{_}", + [_extract_details(rate, settings) for rate in response["rateOptions"]], + ) + for _, response in enumerate(responses, start=1) + if response.get("rateOptions") is not None + ] + ) + + return rates, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.RateDetails: + rate = lib.to_object(rating.RateOptionType, data) + mail_class = rate.rates[0].mailClass + service = provider_units.ShippingService.map(mail_class) + charges = [ + ("Base Charge", lib.to_money(rate.totalBasePrice)), + *[(_.description, lib.to_money(_.price)) for _ in rate.rates], + *[(_.name, lib.to_money(_.price)) for _ in rate.extraServices], + ] + + return models.RateDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + service=service.name_or_key, + total_charge=lib.to_money(rate.totalPrice), + currency="USD", + extra_charges=[ + models.ChargeDetails(name=name, currency="USD", amount=amount) + for name, amount in charges + ], + meta=dict( + service_name=service.name or mail_class, + zone=lib.failsafe(lambda: rate.rates[0].zone), + ), + ) + + +def rate_request( + payload: models.RateRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + + if shipper.country_code != units.Country.US.name: + raise errors.OriginNotServicedError(shipper.country_code) + + if recipient.country_code == units.Country.US.name: + raise errors.DestinationNotServicedError(recipient.country_code) + + services = lib.to_services(payload.services, provider_units.ShippingService) + 
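+    # expand unified options and packages; one RateRequest is built per package below, and mailClass falls back to "ALL" when no specific service is requested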
options = lib.to_shipping_options( + payload.options, + initializer=provider_units.shipping_options_initializer, + ) + packages = lib.to_packages( + payload.parcels, + options=options, + package_option_type=provider_units.ShippingOption, + shipping_options_initializer=provider_units.shipping_options_initializer, + ) + + # map data to convert karrio model to usps specific type + request = [ + usps.RateRequestType( + originZIPCode=shipper.postal_code, + foreignPostalCode=recipient.postal_code, + destinationCountryCode=recipient.country_code, + weight=package.weight.LB, + length=package.length.IN, + width=package.width.IN, + height=package.height.IN, + mailClass=getattr( + services.first, "value", provider_units.ShippingService.usps_all.value + ), + priceType=package.options.usps_price_type.state or "RETAIL", + mailingDate=lib.fdate( + package.options.shipment_date.state or time.strftime("%Y-%m-%d") + ), + accountType=settings.account_type or "EPS", + accountNumber=settings.account_number, + itemValue=package.items.value_amount, + extraServices=[ + lib.to_int(_.code) + for __, _ in options.items() + if __ not in provider_units.CUSTOM_OPTIONS + ], + ) + for package in packages + ] + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/__init__.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/__init__.py new file mode 100644 index 0000000000..266e8b7789 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/__init__.py @@ -0,0 +1,8 @@ +from karrio.providers.usps_rest_international.shipment.create import ( + parse_shipment_response, + shipment_request, +) +from karrio.providers.usps_rest_international.shipment.cancel import ( + parse_shipment_cancel_response, + shipment_cancel_request, +) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/cancel.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/cancel.py new file mode 100644 index 0000000000..467ba1a805 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/cancel.py @@ -0,0 +1,53 @@ +import typing +import karrio.lib as lib +import karrio.core.models as models +import karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units + + +def parse_shipment_cancel_response( + _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], + settings: provider_utils.Settings, +) -> typing.Tuple[models.ConfirmationDetails, typing.List[models.Message]]: + responses = _response.deserialize() + messages: typing.List[models.Message] = sum( + [ + error.parse_error_response(response, settings, tracking_number=_) + for _, response in responses + ], + start=[], + ) + success = all([_["ok"] for __, _ in responses]) + + confirmation = ( + models.ConfirmationDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + operation="Cancel Shipment", + success=success, + ) + if success + else None + ) + + return confirmation, messages + + +def shipment_cancel_request( + payload: models.ShipmentCancelRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + + # map data to convert karrio model to usps specific type + request = [ 
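+        # one cancel payload per unique tracking number: the shipment_identifier plus any "shipment_identifiers" option values, deduplicated via set()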
+ dict(trackingNumber=_) + for _ in set( + [ + payload.shipment_identifier, + *((payload.options or {}).get("shipment_identifiers") or []), + ] + ) + ] + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/create.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/create.py new file mode 100644 index 0000000000..36e431d3ff --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/shipment/create.py @@ -0,0 +1,243 @@ +"""Karrio USPS create label implementation.""" + +import karrio.schemas.usps_rest_international.label_request as usps +import karrio.schemas.usps_rest_international.label_response as shipping + +import time +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.core.errors as errors +import karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units + + +def parse_shipment_response( + _response: lib.Deserializable[typing.List[dict]], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.RateDetails], typing.List[models.Message]]: + responses = _response.deserialize() + + shipment = lib.to_multi_piece_shipment( + [ + ( + f"{_}", + _extract_details(response, settings, _response.ctx), + ) + for _, response in enumerate(responses, start=1) + if response.get("error") is None + ] + ) + messages: typing.List[models.Message] = sum( + [error.parse_error_response(response, settings) for response in responses], + start=[], + ) + + return shipment, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, + ctx: dict = None, +) -> models.ShipmentDetails: + details = lib.to_object(shipping.LabelResponseType, data) + label = details.labelImage + label_type = ctx.get("label_type", "PDF") + + return models.ShipmentDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + tracking_number=details.labelMetadata.internationalTrackingNumber, + shipment_identifier=details.labelMetadata.internationalTrackingNumber, + label_type=label_type, + docs=models.Documents(label=label), + meta=dict( + SKU=details.labelMetadata.SKU, + postage=details.labelMetadata.postage, + ), + ) + + +def shipment_request( + payload: models.ShipmentRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + shipper = lib.to_address(payload.shipper) + recipient = lib.to_address(payload.recipient) + + if shipper.country_code != units.Country.US.name: + raise errors.OriginNotServicedError(shipper.country_code) + + if recipient.country_code == units.Country.US.name: + raise errors.DestinationNotServicedError(recipient.country_code) + + service = provider_units.ShippingService.map(payload.service).value_or_key + options = lib.to_shipping_options( + payload.options, + initializer=provider_units.shipping_options_initializer, + ) + packages = lib.to_packages( + payload.parcels, + options=options, + package_option_type=provider_units.ShippingOption, + shipping_options_initializer=provider_units.shipping_options_initializer, + ) + customs = lib.to_customs_info( + payload.customs, + shipper=payload.shipper, + recipient=payload.recipient, + weight_unit=units.WeightUnit.LB.name, + ) + pickup_location = 
lib.to_address(options.hold_for_pickup_address.state) + label_type = provider_units.LabelType.map(payload.label_type).value or "PDF" + + # map data to convert karrio model to usps specific type + request = [ + usps.LabelRequestType( + imageInfo=usps.ImageInfoType( + imageType=label_type, + labelType="4X6LABEL", + ), + toAddress=usps.AddressType( + streetAddress=recipient.address_line1, + secondaryAddress=recipient.address_line2, + city=recipient.city, + state=recipient.state, + ZIPCode=lib.to_zip5(recipient.postal_code) or "", + ZIPPlus4=lib.to_zip4(recipient.postal_code) or "", + urbanization=None, + firstName=recipient.person_name, + lastName=None, + firm=recipient.company_name, + phone=recipient.phone_number, + email=recipient.email, + ignoreBadAddress=True, + platformUserId=None, + ), + fromAddress=usps.AddressType( + streetAddress=shipper.address_line1, + secondaryAddress=shipper.address_line2, + city=shipper.city, + state=shipper.state, + ZIPCode=lib.to_zip4(shipper.postal_code) or "", + ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", + urbanization=None, + firstName=shipper.person_name, + lastName=None, + firm=shipper.company_name, + phone=shipper.phone_number, + email=shipper.email, + ignoreBadAddress=True, + platformUserId=None, + ), + senderAddress=usps.AddressType( + streetAddress=shipper.address_line1, + secondaryAddress=shipper.address_line2, + city=shipper.city, + state=shipper.state, + ZIPCode=lib.to_zip4(shipper.postal_code) or "", + ZIPPlus4=lib.to_zip5(shipper.postal_code) or "", + urbanization=None, + firstName=shipper.person_name, + lastName=None, + firm=shipper.company_name, + phone=shipper.phone_number, + email=shipper.email, + ignoreBadAddress=True, + platformUserId=None, + ), + packageDescription=usps.PackageDescriptionType( + weightUOM="lb", + weight=package.weight.LB, + dimensionsUOM="in", + length=package.length.IN, + height=package.height.IN, + width=package.width.IN, + girth=package.girth, + mailClass=service, + rateIndicator=package.options.usps_rate_indicator.state or "SP", + processingCategory=lib.identity( + package.options.usps_processing_category.state or "NON_MACHINABLE" + ), + destinationEntryFacilityType=lib.identity( + package.options.usps_destination_facility_type.state or "NONE" + ), + destinationEntryFacilityAddress=lib.identity( + usps.DestinationEntryFacilityAddressType( + streetAddress=pickup_location.address_line1, + secondaryAddress=pickup_location.address_line2, + city=pickup_location.city, + state=pickup_location.state, + ZIPCode=lib.to_zip4(pickup_location.postal_code) or "", + ZIPPlus4=lib.to_zip5(pickup_location.postal_code) or "", + urbanization=None, + ) + if package.options.hold_for_pickup_address.state is not None + else None + ), + packageOptions=lib.identity( + usps.PackageOptionsType( + packageValue=package.total_value, + nonDeliveryOption=None, + redirectAddress=None, + generateGXEvent=None, + originalPackage=None, + ) + if (package.total_value or 0.0) > 0.0 + else None + ), + customerReference=[ + usps.CustomerReferenceType( + referenceNumber=reference, + ) + for reference in [payload.reference] + if reference is not None + ], + extraServices=[ + lib.to_int(_.code) + for __, _ in package.options.items() + if __ not in provider_units.CUSTOM_OPTIONS + ], + mailingDate=lib.fdate( + package.options.shipment_date.state or time.strftime("%Y-%m-%d") + ), + ), + customsForm=usps.CustomsFormType( + contentComments=customs.content_description, + restrictionType=package.options.usps_restriction_type.state, + 
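+                # remaining customs declarations below are populated from payload.customs (lib.to_customs_info) and its options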
restrictionComments=package.options.restrictionComments.state, + AESITN=customs.options.aes.state, + invoiceNumber=customs.invoice, + licenseNumber=customs.options.license_number.state, + certificateNumber=customs.options.certificate_number.state, + customsContentType=lib.identity( + provider_units.CustomsContentType.map(customs.content_type).value + or "OTHER" + ), + importersReference=None, + exportersReference=None, + contents=[ + usps.ContentType( + itemDescription=item.description, + itemQuantity=item.quantity, + itemValue=item.value_amount, + itemTotalValue=item.value_amount * item.quantity, + weightUOM="lb", + itemWeight=item.weight, + itemTotalWeight=item.weight * item.quantity, + HSTariffNumber=item.hs_code, + countryofOrigin=item.origin_country, + itemCategory=None, + itemSubcategory=None, + ) + for item in customs.commodities + ], + ), + ) + for package in packages + ] + + return lib.Serializable(request, lib.to_dict, dict(label_type=label_type)) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/tracking.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/tracking.py new file mode 100644 index 0000000000..823f8eead5 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/tracking.py @@ -0,0 +1,99 @@ +"""Karrio USPS rating API implementation.""" + +# import karrio.schemas.usps_rest_international.tracking_request as usps +import karrio.schemas.usps_rest_international.tracking_response as tracking + +import typing +import karrio.lib as lib +import karrio.core.units as units +import karrio.core.models as models +import karrio.providers.usps_rest_international.error as error +import karrio.providers.usps_rest_international.utils as provider_utils +import karrio.providers.usps_rest_international.units as provider_units + + +def parse_tracking_response( + _response: lib.Deserializable[typing.List[typing.Tuple[str, dict]]], + settings: provider_utils.Settings, +) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]: + responses = _response.deserialize() + + messages: typing.List[models.Message] = sum( + [ + error.parse_error_response(response, settings, tracking_number=_) + for _, response in responses + ], + start=[], + ) + tracking_details = [ + _extract_details(details, settings) + for _, details in responses + if "error" not in details + ] + + return tracking_details, messages + + +def _extract_details( + data: dict, + settings: provider_utils.Settings, +) -> models.TrackingDetails: + details = lib.to_object(tracking.TrackingResponseType, data) + status = next( + ( + status.name + for status in list(provider_units.TrackingStatus) + if getattr(details, "status", None) in status.value + ), + provider_units.TrackingStatus.in_transit.name, + ) + + return models.TrackingDetails( + carrier_id=settings.carrier_id, + carrier_name=settings.carrier_name, + tracking_number=details.trackingNumber, + events=[ + models.TrackingEvent( + date=lib.fdate(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), + description=event.name, + code=event.eventType, + time=lib.flocaltime(event.eventTimestamp, "%Y-%m-%dT%H:%M:%SZ"), + location=lib.text( + event.eventCity, + event.eventZIP, + event.eventState, + event.eventCountry, + separator=", ", + ), + ) + for event in details.trackingEvents + ], + estimated_delivery=lib.fdate( + details.expectedDeliveryTimeStamp, + "%Y-%m-%dT%H:%M:%SZ", + ), + delivered=status == "delivered", + status=status, + 
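+            # unified tracking info: carrier tracking link, expected delivery, service, and origin/destination details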
info=models.TrackingInfo( + # fmt: off + carrier_tracking_link=settings.tracking_url.format(details.trackingNumber), + expected_delivery=lib.fdate(details.expectedDeliveryTimeStamp, "%Y-%m-%dT%H:%M:%SZ"), + shipment_service=provider_units.ShippingService.map(details.serviceTypeCode).name_or_key, + shipment_origin_country=details.originCountry, + shipment_origin_postal_code=details.originZIP, + shipment_destination_country=details.destinationCountryCode, + shipment_destination_postal_code=details.destinationZIP, + # fmt: on + ), + ) + + +def tracking_request( + payload: models.TrackingRequest, + settings: provider_utils.Settings, +) -> lib.Serializable: + + # map data to convert karrio model to usps specific type + request = payload.tracking_numbers + + return lib.Serializable(request, lib.to_dict) diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/units.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/units.py new file mode 100644 index 0000000000..44cb851c4e --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/units.py @@ -0,0 +1,204 @@ +import karrio.lib as lib +import karrio.core.units as units + + +class PackagingType(lib.StrEnum): + """Carrier specific packaging type""" + + PACKAGE = "PACKAGE" + + """ Unified Packaging type mapping """ + envelope = PACKAGE + pak = PACKAGE + tube = PACKAGE + pallet = PACKAGE + small_box = PACKAGE + medium_box = PACKAGE + your_packaging = PACKAGE + + +class CustomsContentType(lib.StrEnum): + merchandise = "MERCHANDISE" + gift = "GIFT" + document = "DOCUMENT" + commercial_sample = "COMMERCIAL_SAMPLE" + returned_goods = "RETURNED_GOODS" + other = "OTHER" + humanitarian_donations = "HUMANITARIAN_DONATIONS" + dangerous_goods = "DANGEROUS_GOODS" + cremated_remains = "CREMATED_REMAINS" + non_negotiable_document = "NON_NEGOTIABLE_DOCUMENT" + medical_supplies = "MEDICAL_SUPPLIES" + pharmaceuticals = "PHARMACEUTICALS" + + """ Unified Content type mapping """ + + documents = document + sample = commercial_sample + return_merchandise = returned_goods + + +class LabelType(lib.StrEnum): + """Carrier specific label type""" + + PDF = "PDF" + TIFF = "TIFF" + JPG = "JPG" + SVG = "SVG" + ZPL203DPI = "ZPL203DPI" + ZPL300DPI = "ZPL300DPI" + LABEL_BROKER = "LABEL_BROKER" + NONE = "NONE" + + """ Unified Label type mapping """ + ZPL = ZPL300DPI + PNG = JPG + + +class ShippingService(lib.StrEnum): + """Carrier specific services""" + + usps_standard_service = "USPS Standard Service" + usps_parcel_select = "PARCEL_SELECT" + usps_parcel_select_lightweight = "PARCEL_SELECT_LIGHTWEIGHT" + usps_priority_mail_express = "PRIORITY_MAIL_EXPRESS" + usps_priority_mail = "PRIORITY_MAIL" + usps_first_class_package_service = "FIRST-CLASS_PACKAGE_SERVICE" + usps_library_mail = "LIBRARY_MAIL" + usps_media_mail = "MEDIA_MAIL" + usps_bound_printed_matter = "BOUND_PRINTED_MATTER" + usps_connect_local = "USPS_CONNECT_LOCAL" + usps_connect_mail = "USPS_CONNECT_MAIL" + usps_connect_next_day = "USPS_CONNECT_NEXT_DAY" + usps_connect_regional = "USPS_CONNECT_REGIONAL" + usps_connect_same_day = "USPS_CONNECT_SAME_DAY" + usps_ground_advantage = "USPS_GROUND_ADVANTAGE" + usps_retail_ground = "USPS_RETAIL_GROUND" + usps_all = "ALL" + + +class ShippingOption(lib.Enum): + """Carrier specific options""" + + # fmt: off + usps_label_delivery_service = lib.OptionEnum("415", bool) + usps_tracking_plus_6_months = lib.OptionEnum("480", bool) + usps_tracking_plus_1_year = 
lib.OptionEnum("481", bool) + usps_tracking_plus_3_years = lib.OptionEnum("482", bool) + usps_tracking_plus_5_years = lib.OptionEnum("483", bool) + usps_tracking_plus_7_years = lib.OptionEnum("484", bool) + usps_tracking_plus_10_years = lib.OptionEnum("485", bool) + usps_tracking_plus_signature_3_years = lib.OptionEnum("486", bool) + usps_tracking_plus_signature_5_years = lib.OptionEnum("487", bool) + usps_tracking_plus_signature_7_years = lib.OptionEnum("488", bool) + usps_tracking_plus_signature_10_years = lib.OptionEnum("489", bool) + usps_hazardous_materials_air_eligible_ethanol = lib.OptionEnum("810", bool) + usps_hazardous_materials_class_1_toy_propellant_safety_fuse_package = lib.OptionEnum("811", bool) + usps_hazardous_materials_class_3_flammable_and_combustible_liquids = lib.OptionEnum("812", bool) + usps_hazardous_materials_class_7_radioactive_materials = lib.OptionEnum("813", bool) + usps_hazardous_materials_class_8_air_eligible_corrosive_materials = lib.OptionEnum("814", bool) + usps_hazardous_materials_class_8_nonspillable_wet_batteries = lib.OptionEnum("815", bool) + usps_hazardous_materials_class_9_lithium_battery_marked_ground_only = lib.OptionEnum("816", bool) + usps_hazardous_materials_class_9_lithium_battery_returns = lib.OptionEnum("817", bool) + usps_hazardous_materials_class_9_marked_lithium_batteries = lib.OptionEnum("818", bool) + usps_hazardous_materials_class_9_dry_ice = lib.OptionEnum("819", bool) + usps_hazardous_materials_class_9_unmarked_lithium_batteries = lib.OptionEnum("820", bool) + usps_hazardous_materials_class_9_magnetized_materials = lib.OptionEnum("821", bool) + usps_hazardous_materials_division_4_1_mailable_flammable_solids_and_safety_matches = lib.OptionEnum("822", bool) + usps_hazardous_materials_division_5_1_oxidizers = lib.OptionEnum("823", bool) + usps_hazardous_materials_division_5_2_organic_peroxides = lib.OptionEnum("824", bool) + usps_hazardous_materials_division_6_1_toxic_materials = lib.OptionEnum("825", bool) + usps_hazardous_materials_division_6_2_biological_materials = lib.OptionEnum("826", bool) + usps_hazardous_materials_excepted_quantity_provision = lib.OptionEnum("827", bool) + usps_hazardous_materials_ground_only_hazardous_materials = lib.OptionEnum("828", bool) + usps_hazardous_materials_air_eligible_id8000_consumer_commodity = lib.OptionEnum("829", bool) + usps_hazardous_materials_lighters = lib.OptionEnum("830", bool) + usps_hazardous_materials_limited_quantity_ground = lib.OptionEnum("831", bool) + usps_hazardous_materials_small_quantity_provision_markings_required = lib.OptionEnum("832", bool) + usps_hazardous_materials = lib.OptionEnum("857", bool) + usps_certified_mail = lib.OptionEnum("910", bool) + usps_certified_mail_restricted_delivery = lib.OptionEnum("911", bool) + usps_certified_mail_adult_signature_required = lib.OptionEnum("912", bool) + usps_certified_mail_adult_signature_restricted_delivery = lib.OptionEnum("913", bool) + usps_collect_on_delivery = lib.OptionEnum("915", float) + usps_collect_on_delivery_restricted_delivery = lib.OptionEnum("917", bool) + usps_tracking_electronic = lib.OptionEnum("920", bool) + usps_signature_confirmation = lib.OptionEnum("921", bool) + usps_adult_signature_required = lib.OptionEnum("922", bool) + usps_adult_signature_restricted_delivery = lib.OptionEnum("923", bool) + usps_signature_confirmation_restricted_delivery = lib.OptionEnum("924", bool) + usps_priority_mail_express_merchandise_insurance = lib.OptionEnum("925", bool) + usps_insurance_bellow_500 = lib.OptionEnum("930", 
float) + usps_insurance_above_500 = lib.OptionEnum("931", float) + usps_insurance_restricted_delivery = lib.OptionEnum("934", bool) + usps_registered_mail = lib.OptionEnum("940", bool) + usps_registered_mail_restricted_delivery = lib.OptionEnum("941", bool) + usps_return_receipt = lib.OptionEnum("955", bool) + usps_return_receipt_electronic = lib.OptionEnum("957", bool) + usps_signature_requested_priority_mail_express_only = lib.OptionEnum("981", bool) + usps_parcel_locker_delivery = lib.OptionEnum("984", bool) + usps_po_to_addressee_priority_mail_express_only = lib.OptionEnum("986", bool) + usps_sunday_delivery = lib.OptionEnum("981", bool) + # fmt: on + + """ Custom Options """ + usps_price_type = lib.OptionEnum("priceType") + usps_facility_id = lib.OptionEnum("facilityId") + usps_hold_for_pickup = lib.OptionEnum("holdForPickup", bool) + usps_rate_indicator = lib.OptionEnum("rateIndicator") + usps_processing_category = lib.OptionEnum("processingCategory") + usps_carrier_release = lib.OptionEnum("carrierRelease", bool) + usps_physical_signature_required = lib.OptionEnum("physicalSignatureRequired", bool) + usps_restriction_type = lib.OptionEnum("restrictionType") + + """ Unified Option type mapping """ + cash_on_delivery = usps_collect_on_delivery + signature_confirmation = usps_signature_confirmation + sunday_delivery = usps_sunday_delivery + hold_at_location = usps_hold_for_pickup + + +CUSTOM_OPTIONS = [ + ShippingOption.usps_price_type.name, + ShippingOption.usps_facility_id.name, + ShippingOption.usps_hold_for_pickup.name, + ShippingOption.usps_rate_indicator.name, + ShippingOption.usps_processing_category.name, + ShippingOption.usps_carrier_release.name, + ShippingOption.usps_physical_signature_required.name, +] + + +def shipping_options_initializer( + options: dict, + package_options: units.ShippingOptions = None, +) -> units.ShippingOptions: + """ + Apply default values to the given options. 
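+    Insurance amounts are routed to the below/above USD 500 extra service codes (930/931).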
+ """ + + if package_options is not None: + options.update(package_options.content) + + if "insurance" in options: + if lib.to_money(options["insurance"]) > 500: + options[ShippingOption.usps_insurance_above_500.name] = options["insurance"] + else: + options[ShippingOption.usps_insurance_bellow_500.name] = options[ + "insurance" + ] + + def items_filter(key: str) -> bool: + return key in ShippingOption # type: ignore + + return units.ShippingOptions(options, ShippingOption, items_filter=items_filter) + + +class TrackingStatus(lib.Enum): + on_hold = ["on_hold"] + delivered = ["delivered"] + in_transit = ["in_transit"] + delivery_failed = ["delivery_failed"] + delivery_delayed = ["delivery_delayed"] + out_for_delivery = ["out_for_delivery"] + ready_for_pickup = ["ready_for_pickup"] diff --git a/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/utils.py b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/utils.py new file mode 100644 index 0000000000..fbfacb0d09 --- /dev/null +++ b/modules/connectors/usps_rest_international/karrio/providers/usps_rest_international/utils.py @@ -0,0 +1,87 @@ +import datetime +import karrio.lib as lib +import karrio.core as core +import karrio.core.errors as errors + + +class Settings(core.Settings): + """USPS connection settings.""" + + # Add carrier specific api connection properties here + client_id: str + client_secret: str + account_type: str = None + account_number: str = None + + @property + def carrier_name(self): + return "usps_rest_international" + + @property + def server_url(self): + return "https://api.usps.com" + + @property + def tracking_url(self): + return "https://tools.usps.com/go/TrackConfirmAction?tLabels={}" + + @property + def connection_config(self) -> lib.units.Options: + return lib.to_connection_config( + self.config or {}, + option_type=ConnectionConfig, + ) + + @property + def access_token(self): + """Retrieve the access_token using the client_id|client_secret pair + or collect it from the cache if an unexpired access_token exist. 
+ """ + cache_key = f"{self.carrier_name}|{self.client_id}|{self.client_secret}" + now = datetime.datetime.now() + datetime.timedelta(minutes=30) + + auth = self.connection_cache.get(cache_key) or {} + token = auth.get("access_token") + expiry = lib.to_date(auth.get("expiry"), current_format="%Y-%m-%d %H:%M:%S") + + if token is not None and expiry is not None and expiry > now: + return token + + self.connection_cache.set(cache_key, lambda: login(self)) + new_auth = self.connection_cache.get(cache_key) + + return new_auth["access_token"] + + +def login(settings: Settings, client_id: str = None, client_secret: str = None): + import karrio.providers.usps_rest_international.error as error + + result = lib.request( + url=f"{settings.server_url}/oauth2/v3/token", + method="POST", + headers={"content-Type": "application/x-www-form-urlencoded"}, + data=lib.to_query_string( + dict( + grant_type="client_credentials", + client_id=client_id, + client_secret=client_secret, + ) + ), + ) + + response = lib.to_dict(result) + messages = error.parse_error_response(response, settings) + + if any(messages): + raise errors.ShippingSDKError(messages) + + expiry = datetime.datetime.now() + datetime.timedelta( + seconds=float(response.get("expires_in", 0)) + ) + + return {**response, "expiry": lib.fdatetime(expiry)} + + +class ConnectionConfig(lib.Enum): + shipping_options = lib.OptionEnum("shipping_options", list) + shipping_services = lib.OptionEnum("shipping_services", list) diff --git a/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/__init__.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/error_response.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/error_response.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/error_response.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/error_response.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/label_request.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/label_request.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/label_request.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/label_request.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/label_response.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/label_response.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/label_response.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/label_response.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/pickup_request.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_request.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/pickup_request.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_request.py diff --git 
a/modules/connectors/usps_international/karrio/schemas/usps_international/pickup_response.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_response.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/pickup_response.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_response.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/pickup_update_request.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_update_request.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/pickup_update_request.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_update_request.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/pickup_update_response.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_update_response.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/pickup_update_response.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/pickup_update_response.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/rate_request.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/rate_request.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/rate_request.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/rate_request.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/rate_response.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/rate_response.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/rate_response.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/rate_response.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/scan_form_request.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/scan_form_request.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/scan_form_request.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/scan_form_request.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/scan_form_response.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/scan_form_response.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/scan_form_response.py rename to modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/scan_form_response.py diff --git a/modules/connectors/usps_international/karrio/schemas/usps_international/tracking_response.py b/modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/tracking_response.py similarity index 100% rename from modules/connectors/usps_international/karrio/schemas/usps_international/tracking_response.py rename to 
modules/connectors/usps_rest_international/karrio/schemas/usps_rest_international/tracking_response.py diff --git a/modules/connectors/usps_international/schemas/error_response.json b/modules/connectors/usps_rest_international/schemas/error_response.json similarity index 100% rename from modules/connectors/usps_international/schemas/error_response.json rename to modules/connectors/usps_rest_international/schemas/error_response.json diff --git a/modules/connectors/usps_international/schemas/label_request.json b/modules/connectors/usps_rest_international/schemas/label_request.json similarity index 100% rename from modules/connectors/usps_international/schemas/label_request.json rename to modules/connectors/usps_rest_international/schemas/label_request.json diff --git a/modules/connectors/usps_international/schemas/label_response.json b/modules/connectors/usps_rest_international/schemas/label_response.json similarity index 100% rename from modules/connectors/usps_international/schemas/label_response.json rename to modules/connectors/usps_rest_international/schemas/label_response.json diff --git a/modules/connectors/usps_international/schemas/pickup_request.json b/modules/connectors/usps_rest_international/schemas/pickup_request.json similarity index 100% rename from modules/connectors/usps_international/schemas/pickup_request.json rename to modules/connectors/usps_rest_international/schemas/pickup_request.json diff --git a/modules/connectors/usps_international/schemas/pickup_response.json b/modules/connectors/usps_rest_international/schemas/pickup_response.json similarity index 100% rename from modules/connectors/usps_international/schemas/pickup_response.json rename to modules/connectors/usps_rest_international/schemas/pickup_response.json diff --git a/modules/connectors/usps_international/schemas/pickup_update_request.json b/modules/connectors/usps_rest_international/schemas/pickup_update_request.json similarity index 100% rename from modules/connectors/usps_international/schemas/pickup_update_request.json rename to modules/connectors/usps_rest_international/schemas/pickup_update_request.json diff --git a/modules/connectors/usps_international/schemas/pickup_update_response.json b/modules/connectors/usps_rest_international/schemas/pickup_update_response.json similarity index 100% rename from modules/connectors/usps_international/schemas/pickup_update_response.json rename to modules/connectors/usps_rest_international/schemas/pickup_update_response.json diff --git a/modules/connectors/usps_international/schemas/rate_request.json b/modules/connectors/usps_rest_international/schemas/rate_request.json similarity index 100% rename from modules/connectors/usps_international/schemas/rate_request.json rename to modules/connectors/usps_rest_international/schemas/rate_request.json diff --git a/modules/connectors/usps_international/schemas/rate_response.json b/modules/connectors/usps_rest_international/schemas/rate_response.json similarity index 100% rename from modules/connectors/usps_international/schemas/rate_response.json rename to modules/connectors/usps_rest_international/schemas/rate_response.json diff --git a/modules/connectors/usps_international/schemas/scan_form_request.json b/modules/connectors/usps_rest_international/schemas/scan_form_request.json similarity index 100% rename from modules/connectors/usps_international/schemas/scan_form_request.json rename to modules/connectors/usps_rest_international/schemas/scan_form_request.json diff --git 
a/modules/connectors/usps_international/schemas/scan_form_response.json b/modules/connectors/usps_rest_international/schemas/scan_form_response.json similarity index 100% rename from modules/connectors/usps_international/schemas/scan_form_response.json rename to modules/connectors/usps_rest_international/schemas/scan_form_response.json diff --git a/modules/connectors/usps_international/schemas/tracking_response.json b/modules/connectors/usps_rest_international/schemas/tracking_response.json similarity index 100% rename from modules/connectors/usps_international/schemas/tracking_response.json rename to modules/connectors/usps_rest_international/schemas/tracking_response.json diff --git a/modules/connectors/usps_rest_international/setup.py b/modules/connectors/usps_rest_international/setup.py new file mode 100644 index 0000000000..ba57deae4f --- /dev/null +++ b/modules/connectors/usps_rest_international/setup.py @@ -0,0 +1,27 @@ +"""Warning: This setup.py is only there for git install until poetry support git subdirectory""" + +from setuptools import setup, find_namespace_packages + +with open("README.md", "r") as fh: + long_description = fh.read() + +setup( + name="karrio.usps_rest_international", + version="2024.6-rc22", + description="Karrio - USPS Shipping Extension", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/karrioapi/karrio", + author="karrio", + author_email="hello@karrio.io", + license="Apache-2.0", + packages=find_namespace_packages(exclude=["tests.*", "tests"]), + install_requires=["karrio"], + classifiers=[ + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + ], + zip_safe=False, + include_package_data=True, +) diff --git a/modules/connectors/usps_rest_international/tests/__init__.py b/modules/connectors/usps_rest_international/tests/__init__.py new file mode 100644 index 0000000000..5f6459630c --- /dev/null +++ b/modules/connectors/usps_rest_international/tests/__init__.py @@ -0,0 +1,5 @@ +from tests.usps_rest_international.test_rate import * +from tests.usps_rest_international.test_pickup import * +from tests.usps_rest_international.test_tracking import * +from tests.usps_rest_international.test_shipment import * +from tests.usps_rest_international.test_manifest import * diff --git a/modules/connectors/usps_rest_international/tests/usps_rest_international/__init__.py b/modules/connectors/usps_rest_international/tests/usps_rest_international/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/modules/connectors/usps_rest_international/tests/usps_rest_international/fixture.py b/modules/connectors/usps_rest_international/tests/usps_rest_international/fixture.py new file mode 100644 index 0000000000..58c4adb6e0 --- /dev/null +++ b/modules/connectors/usps_rest_international/tests/usps_rest_international/fixture.py @@ -0,0 +1,33 @@ +import karrio +import datetime +import karrio.lib as lib + +expiry = datetime.datetime.now() + datetime.timedelta(days=1) +client_id = "client_id" +client_secret = "client_secret" +cached_auth = { + f"usps_rest_international|{client_id}|{client_secret}": dict( + token_type="Bearer", + issued_at="1685542319575", + client_id=client_id, + access_token="access_token", + scope="addresses international-prices subscriptions payments pickup tracking labels scan-forms companies service-delivery-standards locations international-labels prices", + expires_in="14399", + refresh_count="0", + status="approved", + 
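+ # expiry is set one day ahead so the cached token above stays valid and the gateway resolves it from the cache instead of calling the live OAuth endpoint during tests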
expiry=expiry.strftime("%Y-%m-%d %H:%M:%S"), + issuer="api.usps_rest_international.com", + application_name="Silver Shipper Developer", + api_products="[Shipping-Silver]", + public_key="LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4QWxwZjNSNEE1S0lwZnhJVWk1bgpMTFByZjZVZTV3MktzeGxSVzE1UWV0UzBjWGVxaW9OT2hXbDNaaVhEWEdKT3ZuK3RoY0NWVVQ3WC9JZWYvTENZCkhUWk1kYUJOdW55VHEwT2RNZmVkUU8zYUNKZmwvUnJPTHYyaG9TRDR4U1YxRzFuTTc1RTlRYitFZ1p0cmFEUXoKNW42SXRpMUMzOHFGMjU5NVRHUWVUemx3Wk1LQng1VTY2bGwzNzlkZ2plTUJxS3ppVHZHWEpOdVg5ZzRrRlBIaApTLzNERm9FNkVFSW8zUHExeDlXTnRaSm93VkRwQUVZZTQ3SU1UdXJDN2NGcXp2d3M1b1BDRHQ4c083N2lUdDN0Cm1vK3NrM2ExWnZSaGs2WUQ3Zkt1UldQVzFEYUM4dC9pazlnWnhqQndYNlZsSUhDRzRZSHlYejZteWdGV09jMmEKOVFJREFRQUIKLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0t", + ) +} + +gateway = karrio.gateway["usps_rest_international"].create( + dict( + client_id="client_id", + client_secret="client_secret", + account_number="Your Account Number", + ), + cache=lib.Cache(**cached_auth), +) diff --git a/modules/connectors/usps_international/tests/usps_international/test_manifest.py b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_manifest.py similarity index 98% rename from modules/connectors/usps_international/tests/usps_international/test_manifest.py rename to modules/connectors/usps_rest_international/tests/usps_rest_international/test_manifest.py index 53163638af..102de1bf0a 100644 --- a/modules/connectors/usps_international/tests/usps_international/test_manifest.py +++ b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_manifest.py @@ -19,7 +19,7 @@ def test_create_tracking_request(self): self.assertEqual(request.serialize(), ManifestRequest) def test_create_manifest(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Manifest.create(self.ManifestRequest).from_(gateway) @@ -29,7 +29,7 @@ def test_create_manifest(self): ) def test_parse_manifest_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = ManifestResponse parsed_response = ( karrio.Manifest.create(self.ManifestRequest).from_(gateway).parse() @@ -59,8 +59,8 @@ def test_parse_manifest_response(self): ParsedManifestResponse = [ { - "carrier_id": "usps_international", - "carrier_name": "usps_international", + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", "doc": {"manifest": ANY}, "meta": {"manifestNumber": "string", "trackingNumbers": ["string"]}, }, diff --git a/modules/connectors/usps_international/tests/usps_international/test_pickup.py b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_pickup.py similarity index 92% rename from modules/connectors/usps_international/tests/usps_international/test_pickup.py rename to modules/connectors/usps_rest_international/tests/usps_rest_international/test_pickup.py index e8b96ab6e7..67f1ee010a 100644 --- a/modules/connectors/usps_international/tests/usps_international/test_pickup.py +++ b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_pickup.py @@ -31,7 +31,7 @@ def test_create_cancel_pickup_request(self): self.assertEqual(request.serialize(), PickupCancelRequest) def test_create_pickup(self): - with 
patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Pickup.schedule(self.PickupRequest).from_(gateway) @@ -41,7 +41,7 @@ def test_create_pickup(self): ) def test_update_pickup(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Pickup.update(self.PickupUpdateRequest).from_(gateway) @@ -51,7 +51,7 @@ def test_update_pickup(self): ) def test_cancel_shipment(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Pickup.cancel(self.PickupCancelRequest).from_(gateway) @@ -61,7 +61,7 @@ def test_cancel_shipment(self): ) def test_parse_pickup_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = PickupResponse parsed_response = ( karrio.Pickup.schedule(self.PickupRequest).from_(gateway).parse() @@ -71,7 +71,7 @@ def test_parse_pickup_response(self): self.assertListEqual(lib.to_dict(parsed_response), ParsedPickupResponse) def test_parse_cancel_pickup_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = PickupCancelResponse parsed_response = ( karrio.Pickup.cancel(self.PickupCancelRequest).from_(gateway).parse() @@ -129,8 +129,8 @@ def test_parse_cancel_pickup_response(self): ParsedPickupResponse = [ { - "carrier_id": "usps_international", - "carrier_name": "usps_international", + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", "confirmation_number": "string", "pickup_date": "2019-08-24", }, @@ -139,8 +139,8 @@ def test_parse_cancel_pickup_response(self): ParsedCancelPickupResponse = [ { - "carrier_id": "usps_international", - "carrier_name": "usps_international", + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", "operation": "Cancel Pickup", "success": True, }, diff --git a/modules/connectors/usps_rest_international/tests/usps_rest_international/test_rate.py b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_rate.py new file mode 100644 index 0000000000..0fd5230aaa --- /dev/null +++ b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_rate.py @@ -0,0 +1,169 @@ +import unittest +from unittest.mock import patch, ANY +from .fixture import gateway +from tests import logger + +import karrio +import karrio.lib as lib +import karrio.core.models as models + + +class TestUSPSRating(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.RateRequest = models.RateRequest(**RatePayload) + + def test_create_rate_request(self): + request = gateway.mapper.create_rate_request(self.RateRequest) + logger.debug(request.serialize()) + self.assertEqual(request.serialize(), RateRequest) + + def test_get_rate(self): + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: + mock.return_value = "{}" + karrio.Rating.fetch(self.RateRequest).from_(gateway) + + self.assertEqual( + mock.call_args[1]["url"], + f"{gateway.settings.server_url}/v3/total-rates/search", + ) + 
+ def test_parse_rate_response(self): + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: + mock.return_value = RateResponse + parsed_response = ( + karrio.Rating.fetch(self.RateRequest).from_(gateway).parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual(lib.to_dict(parsed_response), ParsedRateResponse) + + +if __name__ == "__main__": + unittest.main() + + +RatePayload = { + "shipper": { + "company_name": "ABC Corp.", + "address_line1": "1098 N Fraser Street", + "city": "Georgetown", + "postal_code": "29440", + "country_code": "US", + "person_name": "Tall Tom", + "phone_number": "8005554526", + "state_code": "SC", + }, + "recipient": { + "company_name": "Coffee Five", + "address_line1": "R. da Quitanda, 86 - quiosque 01", + "city": "Centro", + "postal_code": "29440", + "country_code": "BR", + "person_name": "John", + "phone_number": "8005554526", + "state_code": "Rio de Janeiro", + }, + "parcels": [ + { + "height": 50, + "length": 50, + "weight": 20, + "width": 12, + "dimension_unit": "CM", + "weight_unit": "KG", + } + ], + "options": { + "usps_label_delivery_service": True, + "usps_price_type": "RETAIL", + "shipment_date": "2024-07-28", + }, + "services": ["usps_parcel_select"], + "reference": "REF-001", +} + +ParsedRateResponse = [ + [ + { + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", + "currency": "USD", + "extra_charges": [ + {"amount": 3.35, "currency": "USD", "name": "Base Charge"}, + {"amount": 3.35, "currency": "USD", "name": "string"}, + {"amount": 3.35, "currency": "USD", "name": "Adult Signature Required"}, + ], + "meta": {"service_name": "usps_parcel_select", "zone": "01"}, + "service": "usps_parcel_select", + "total_charge": 3.35, + } + ], + [], +] + + +RateRequest = [ + { + "accountNumber": "Your Account Number", + "accountType": "EPS", + "destinationCountryCode": "BR", + "extraServices": [415], + "foreignPostalCode": "29440", + "height": 19.69, + "itemValue": 0.0, + "length": 19.69, + "mailClass": "PARCEL_SELECT", + "mailingDate": "2024-07-28", + "originZIPCode": "29440", + "priceType": "RETAIL", + "weight": 44.1, + "width": 4.72, + } +] + + +RateResponse = """{ + "rateOptions": [ + { + "totalBasePrice": 3.35, + "rates": [ + { + "SKU": "DPXX0XXXXX07200", + "description": "string", + "priceType": "RETAIL", + "price": 3.35, + "weight": 5, + "dimWeight": 5, + "fees": [ + { + "name": "string", + "SKU": "string", + "price": 0 + } + ], + "startDate": "2021-07-16", + "endDate": "2021-07-16", + "mailClass": "PARCEL_SELECT", + "zone": "01" + } + ], + "extraServices": [ + { + "extraService": "922", + "name": "Adult Signature Required", + "SKU": "DPXX0XXXXX07200", + "priceType": "RETAIL", + "price": 3.35, + "warnings": [ + { + "warningCode": "string", + "warningDescription": "string" + } + ] + } + ], + "totalPrice": 3.35 + } + ] +} +""" diff --git a/modules/connectors/usps_international/tests/usps_international/test_shipment.py b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_shipment.py similarity index 93% rename from modules/connectors/usps_international/tests/usps_international/test_shipment.py rename to modules/connectors/usps_rest_international/tests/usps_rest_international/test_shipment.py index 18037a95cb..4b97f2495e 100644 --- a/modules/connectors/usps_international/tests/usps_international/test_shipment.py +++ b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_shipment.py @@ -29,7 +29,7 @@ def 
test_create_cancel_shipment_request(self): self.assertEqual(request.serialize(), ShipmentCancelRequest) def test_create_shipment(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Shipment.create(self.ShipmentRequest).from_(gateway) @@ -39,7 +39,7 @@ def test_create_shipment(self): ) def test_cancel_shipment(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = "{}" karrio.Shipment.cancel(self.ShipmentCancelRequest).from_(gateway) @@ -49,7 +49,7 @@ def test_cancel_shipment(self): ) def test_parse_shipment_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = ShipmentResponse parsed_response = ( karrio.Shipment.create(self.ShipmentRequest).from_(gateway).parse() @@ -58,7 +58,7 @@ def test_parse_shipment_response(self): self.assertListEqual(lib.to_dict(parsed_response), ParsedShipmentResponse) def test_parse_cancel_shipment_response(self): - with patch("karrio.mappers.usps_international.proxy.lib.request") as mock: + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: mock.return_value = ShipmentCancelResponse parsed_response = ( karrio.Shipment.cancel(self.ShipmentCancelRequest) @@ -145,8 +145,8 @@ def test_parse_cancel_shipment_response(self): ParsedShipmentResponse = [ { - "carrier_id": "usps_international", - "carrier_name": "usps_international", + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", "docs": {"label": ANY}, "label_type": "PDF", "meta": { @@ -161,8 +161,8 @@ def test_parse_cancel_shipment_response(self): ParsedCancelShipmentResponse = [ { - "carrier_id": "usps_international", - "carrier_name": "usps_international", + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", "operation": "Cancel Shipment", "success": True, }, diff --git a/modules/connectors/usps_rest_international/tests/usps_rest_international/test_tracking.py b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_tracking.py new file mode 100644 index 0000000000..1fdfeb2cc7 --- /dev/null +++ b/modules/connectors/usps_rest_international/tests/usps_rest_international/test_tracking.py @@ -0,0 +1,225 @@ +import unittest +from unittest.mock import patch, ANY +from .fixture import gateway +from tests import logger + +import karrio +import karrio.lib as lib +import karrio.core.models as models + + +class TestUSPSTracking(unittest.TestCase): + def setUp(self): + self.maxDiff = None + self.TrackingRequest = models.TrackingRequest(**TrackingPayload) + + def test_create_tracking_request(self): + request = gateway.mapper.create_tracking_request(self.TrackingRequest) + logger.debug(request.serialize()) + self.assertEqual(request.serialize(), TrackingRequest) + + def test_get_tracking(self): + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: + mock.return_value = "{}" + karrio.Tracking.fetch(self.TrackingRequest).from_(gateway) + + self.assertEqual( + mock.call_args[1]["url"], + f"{gateway.settings.server_url}/v3/tracking/89108749065090", + ) + + def test_parse_tracking_response(self): + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as 
mock: + mock.return_value = TrackingResponse + parsed_response = ( + karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual(lib.to_dict(parsed_response), ParsedTrackingResponse) + + def test_parse_error_response(self): + with patch("karrio.mappers.usps_rest_international.proxy.lib.request") as mock: + mock.return_value = ErrorResponse + parsed_response = ( + karrio.Tracking.fetch(self.TrackingRequest).from_(gateway).parse() + ) + logger.debug(lib.to_dict(parsed_response)) + self.assertListEqual(lib.to_dict(parsed_response), ParsedErrorResponse) + + +if __name__ == "__main__": + unittest.main() + + +TrackingPayload = { + "tracking_numbers": ["89108749065090"], +} + +ParsedTrackingResponse = [ + [ + { + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", + "delivered": False, + "estimated_delivery": "2019-08-24", + "events": [ + { + "code": "string", + "date": "2019-08-24", + "description": "string", + "location": "string, string, string, string", + "time": "14:15 PM", + } + ], + "info": { + "carrier_tracking_link": "https://tools.usps.com/go/TrackConfirmAction?tLabels=string", + "expected_delivery": "2019-08-24", + "shipment_destination_country": "string", + "shipment_destination_postal_code": "string", + "shipment_origin_country": "st", + "shipment_origin_postal_code": "strin", + "shipment_service": "string", + }, + "status": "in_transit", + "tracking_number": "string", + } + ], + [], +] + +ParsedErrorResponse = [ + [], + [ + { + "carrier_id": "usps_rest_international", + "carrier_name": "usps_rest_international", + "code": "string", + "details": { + "errors": [ + { + "code": "string", + "detail": "string", + "source": {"example": "string", "parameter": "string"}, + "status": "string", + "title": "string", + } + ], + "tracking_number": "89108749065090", + }, + "message": "string", + } + ], +] + + +TrackingRequest = ["89108749065090"] + +TrackingResponse = """{ + "trackingNumber": "string", + "additionalInfo": "string", + "ADPScripting": "string", + "archiveRestoreInfo": "string", + "associatedLabel": "string", + "carrierRelease": true, + "mailClass": "BOUND_PRINTED_MATTER", + "destinationCity": "string", + "destinationCountryCode": "string", + "destinationState": "st", + "destinationZIP": "string", + "editedLabelId": "string", + "emailEnabled": true, + "endOfDay": "string", + "eSOFEligible": true, + "expectedDeliveryTimeStamp": "2019-08-24T14:15:22Z", + "expectedDeliveryType": "string", + "guaranteedDeliveryTimeStamp": "2019-08-24T14:15:22Z", + "guaranteedDetails": "string", + "itemShape": "LETTER", + "kahalaIndicator": true, + "mailType": "INTERNATIONAL_INBOUND", + "approximateIntakeDate": "string", + "uniqueTrackingId": "string", + "onTime": true, + "originCity": "string", + "originCountry": "st", + "originState": "str", + "originZIP": "strin", + "proofOfDeliveryEnabled": true, + "predictedDeliveryTimeStamp": "2019-08-24T14:15:22Z", + "predictedDeliveryDate": "2019-08-24", + "predictedDeliveryWindowStartTime": "string", + "predictedDeliveryWindowEndTime": "string", + "relatedReturnReceiptID": "string", + "redeliveryEnabled": true, + "enabledNotificationRequests": { + "SMS": { + "futureDelivery": true, + "alertDelivery": true, + "todayDelivery": true, + "UP": true, + "DND": true + }, + "EMail": { + "futureDelivery": true, + "alertDelivery": true, + "todayDelivery": true, + "UP": true, + "DND": true, + "firstDisplayable": true, + "otherActivity": true + } + }, + 
"restoreEnabled": true, + "returnDateNotice": "2019-08-24", + "RRAMenabled": true, + "RREEnabled": true, + "services": ["string"], + "serviceTypeCode": "string", + "status": "string", + "statusCategory": "string", + "statusSummary": "Your item was delivered at 12:55 pm on April 05, 2010 in FALMOUTH, MA 02540", + "trackingProofOfDeliveryEnabled": true, + "valueofArticle": "string", + "extendRetentionPurchasedCode": "string", + "extendRetentionExtraServiceCodeOptions": [{}], + "trackingEvents": [ + { + "eventType": "string", + "eventTimestamp": "2019-08-24T14:15:22Z", + "GMTTimestamp": "2024-04-04T14:03:12.041Z", + "GMTOffset": "-7:00", + "eventCountry": "string", + "eventCity": "string", + "eventState": "string", + "eventZIP": "string", + "firm": "string", + "name": "string", + "authorizedAgent": true, + "eventCode": "string", + "actionCode": "string", + "reasonCode": "string" + } + ] +} +""" + +ErrorResponse = """{ + "apiVersion": "string", + "error": { + "code": "string", + "message": "string", + "errors": [ + { + "status": "string", + "code": "string", + "title": "string", + "detail": "string", + "source": { + "parameter": "string", + "example": "string" + } + } + ] + } +} +""" diff --git a/modules/core/karrio/server/providers/extension/models/eshipper_xml.py b/modules/core/karrio/server/providers/extension/models/eshipper_xml.py new file mode 100644 index 0000000000..dc7dd02833 --- /dev/null +++ b/modules/core/karrio/server/providers/extension/models/eshipper_xml.py @@ -0,0 +1,21 @@ +import django.db.models as models +import karrio.server.providers.models as providers + + +class EShipperXMLSettings(providers.Carrier): + CARRIER_NAME = "eshipper_xml" + + class Meta: + db_table = "eshipper-xml-settings" + verbose_name = "eShipper XML Settings" + verbose_name_plural = "eShipper XML Settings" + + username = models.CharField(max_length=200) + password = models.CharField(max_length=200) + + @property + def carrier_name(self) -> str: + return self.CARRIER_NAME + + +SETTINGS = EShipperXMLSettings diff --git a/modules/core/karrio/server/providers/extension/models/usps.py b/modules/core/karrio/server/providers/extension/models/usps.py index 5d8b0d548e..0e940741fe 100644 --- a/modules/core/karrio/server/providers/extension/models/usps.py +++ b/modules/core/karrio/server/providers/extension/models/usps.py @@ -1,23 +1,24 @@ -import django.db.models as models -import karrio.server.providers.models as providers +from django.db import models +from karrio.server.providers.models.carrier import Carrier -class USPSSettings(providers.Carrier): - CARRIER_NAME = "usps" - +class USPSSettings(Carrier): class Meta: db_table = "usps-settings" verbose_name = "USPS Settings" verbose_name_plural = "USPS Settings" - client_id = models.CharField(max_length=100) - client_secret = models.CharField(max_length=100) - account_type = models.CharField(max_length=100, null=True, blank=True) - account_number = models.CharField(max_length=100, blank=True, null=True) + username = models.CharField(max_length=200) + password = models.CharField(max_length=200) + mailer_id = models.CharField(max_length=200, null=True, blank=True) + customer_registration_id = models.CharField(max_length=200, blank=True, null=True) + logistics_manager_mailer_id = models.CharField( + max_length=200, blank=True, null=True + ) @property def carrier_name(self) -> str: - return self.CARRIER_NAME + return "usps" SETTINGS = USPSSettings diff --git a/modules/core/karrio/server/providers/extension/models/usps_international.py 
b/modules/core/karrio/server/providers/extension/models/usps_international.py index 055ff3cdef..a819451920 100644 --- a/modules/core/karrio/server/providers/extension/models/usps_international.py +++ b/modules/core/karrio/server/providers/extension/models/usps_international.py @@ -3,21 +3,20 @@ class USPSInternationalSettings(Carrier): - CARRIER_NAME = "usps_international" - class Meta: db_table = "usps_international-settings" - verbose_name = "USPS International Settings" - verbose_name_plural = "USPS International Settings" + verbose_name = 'USPS International Settings' + verbose_name_plural = 'USPS International Settings' - client_id = models.CharField(max_length=100) - client_secret = models.CharField(max_length=100) - account_type = models.CharField(max_length=200, null=True, blank=True) - account_number = models.CharField(max_length=200, blank=True, null=True) + username = models.CharField(max_length=200) + password = models.CharField(max_length=200) + mailer_id = models.CharField(max_length=200, null=True, blank=True) + customer_registration_id = models.CharField(max_length=200, blank=True, null=True) + logistics_manager_mailer_id = models.CharField(max_length=200, blank=True, null=True) @property def carrier_name(self) -> str: - return self.CARRIER_NAME + return 'usps_international' SETTINGS = USPSInternationalSettings diff --git a/modules/core/karrio/server/providers/extension/models/usps_rest.py b/modules/core/karrio/server/providers/extension/models/usps_rest.py new file mode 100644 index 0000000000..c9e11aef67 --- /dev/null +++ b/modules/core/karrio/server/providers/extension/models/usps_rest.py @@ -0,0 +1,23 @@ +import django.db.models as models +import karrio.server.providers.models as providers + + +class USPSRESTSettings(providers.Carrier): + CARRIER_NAME = "usps_rest" + + class Meta: + db_table = "usps-rest-settings" + verbose_name = "USPS REST Settings" + verbose_name_plural = "USPS REST Settings" + + client_id = models.CharField(max_length=100) + client_secret = models.CharField(max_length=100) + account_type = models.CharField(max_length=100, null=True, blank=True) + account_number = models.CharField(max_length=100, blank=True, null=True) + + @property + def carrier_name(self) -> str: + return self.CARRIER_NAME + + +SETTINGS = USPSRESTSettings diff --git a/modules/core/karrio/server/providers/extension/models/usps_rest_international.py b/modules/core/karrio/server/providers/extension/models/usps_rest_international.py new file mode 100644 index 0000000000..96492531d0 --- /dev/null +++ b/modules/core/karrio/server/providers/extension/models/usps_rest_international.py @@ -0,0 +1,23 @@ +from django.db import models +from karrio.server.providers.models.carrier import Carrier + + +class USPSRESTInternationalSettings(Carrier): + CARRIER_NAME = "usps_rest_international" + + class Meta: + db_table = "usps-rest-international-settings" + verbose_name = "USPS REST International Settings" + verbose_name_plural = "USPS REST International Settings" + + client_id = models.CharField(max_length=100) + client_secret = models.CharField(max_length=100) + account_type = models.CharField(max_length=200, null=True, blank=True) + account_number = models.CharField(max_length=200, blank=True, null=True) + + @property + def carrier_name(self) -> str: + return self.CARRIER_NAME + + +SETTINGS = USPSRESTInternationalSettings diff --git a/modules/core/karrio/server/providers/migrations/0077_eshipperxmlsettings_uspsrestinternationalsettings_and_more.py 
b/modules/core/karrio/server/providers/migrations/0077_eshipperxmlsettings_uspsrestinternationalsettings_and_more.py new file mode 100644 index 0000000000..d31d39567a --- /dev/null +++ b/modules/core/karrio/server/providers/migrations/0077_eshipperxmlsettings_uspsrestinternationalsettings_and_more.py @@ -0,0 +1,184 @@ +# Generated by Django 4.2.14 on 2024-07-30 01:50 + +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone + + +class Migration(migrations.Migration): + + dependencies = [ + ( + "providers", + "0076_rename_customer_registration_id_uspsinternationalsettings_account_number_and_more", + ), + ] + + operations = [ + migrations.CreateModel( + name="EShipperXMLSettings", + fields=[ + ( + "carrier_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="providers.carrier", + ), + ), + ("username", models.CharField(max_length=200)), + ("password", models.CharField(max_length=200)), + ], + options={ + "verbose_name": "eShipper XML Settings", + "verbose_name_plural": "eShipper XML Settings", + "db_table": "eshipper-xml-settings", + }, + bases=("providers.carrier",), + ), + migrations.CreateModel( + name="USPSRESTInternationalSettings", + fields=[ + ( + "carrier_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="providers.carrier", + ), + ), + ("client_id", models.CharField(max_length=100)), + ("client_secret", models.CharField(max_length=100)), + ( + "account_type", + models.CharField(blank=True, max_length=200, null=True), + ), + ( + "account_number", + models.CharField(blank=True, max_length=200, null=True), + ), + ], + options={ + "verbose_name": "USPS REST International Settings", + "verbose_name_plural": "USPS REST International Settings", + "db_table": "usps-rest-international-settings", + }, + bases=("providers.carrier",), + ), + migrations.CreateModel( + name="USPSRESTSettings", + fields=[ + ( + "carrier_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="providers.carrier", + ), + ), + ("client_id", models.CharField(max_length=100)), + ("client_secret", models.CharField(max_length=100)), + ( + "account_type", + models.CharField(blank=True, max_length=100, null=True), + ), + ( + "account_number", + models.CharField(blank=True, max_length=100, null=True), + ), + ], + options={ + "verbose_name": "USPS REST Settings", + "verbose_name_plural": "USPS REST Settings", + "db_table": "usps-rest-settings", + }, + bases=("providers.carrier",), + ), + migrations.RenameField( + model_name="uspsinternationalsettings", + old_name="account_number", + new_name="customer_registration_id", + ), + migrations.RenameField( + model_name="uspsinternationalsettings", + old_name="account_type", + new_name="logistics_manager_mailer_id", + ), + migrations.RemoveField( + model_name="uspsinternationalsettings", + name="client_id", + ), + migrations.RemoveField( + model_name="uspsinternationalsettings", + name="client_secret", + ), + migrations.RemoveField( + model_name="uspssettings", + name="account_number", + ), + migrations.RemoveField( + model_name="uspssettings", + name="account_type", + ), + migrations.RemoveField( + model_name="uspssettings", + name="client_id", + ), + migrations.RemoveField( + model_name="uspssettings", + 
name="client_secret", + ), + migrations.AddField( + model_name="uspsinternationalsettings", + name="mailer_id", + field=models.CharField(blank=True, max_length=200, null=True), + ), + migrations.AddField( + model_name="uspsinternationalsettings", + name="password", + field=models.CharField(default=django.utils.timezone.now, max_length=200), + preserve_default=False, + ), + migrations.AddField( + model_name="uspsinternationalsettings", + name="username", + field=models.CharField(default=django.utils.timezone.now, max_length=200), + preserve_default=False, + ), + migrations.AddField( + model_name="uspssettings", + name="customer_registration_id", + field=models.CharField(blank=True, max_length=200, null=True), + ), + migrations.AddField( + model_name="uspssettings", + name="logistics_manager_mailer_id", + field=models.CharField(blank=True, max_length=200, null=True), + ), + migrations.AddField( + model_name="uspssettings", + name="mailer_id", + field=models.CharField(blank=True, max_length=200, null=True), + ), + migrations.AddField( + model_name="uspssettings", + name="password", + field=models.CharField(default=django.utils.timezone.now, max_length=200), + preserve_default=False, + ), + migrations.AddField( + model_name="uspssettings", + name="username", + field=models.CharField(default=django.utils.timezone.now, max_length=200), + preserve_default=False, + ), + ] diff --git a/modules/core/karrio/server/providers/migrations/0078_auto_20240730_0153.py b/modules/core/karrio/server/providers/migrations/0078_auto_20240730_0153.py new file mode 100644 index 0000000000..40ab0740fd --- /dev/null +++ b/modules/core/karrio/server/providers/migrations/0078_auto_20240730_0153.py @@ -0,0 +1,40 @@ +# Generated by Django 4.2.14 on 2024-07-30 01:53 + +from ast import alias +from django.utils import timezone +from django.db import migrations, transaction + + +@transaction.atomic +def forwards_func(apps, schema_editor): + import karrio.server.providers.models as providers + + db_alias = schema_editor.connection.alias + + carriers = providers.Carrier.objects.using(db_alias).all().distinct().iterator() + + for _carrier in [_ for _ in carriers if _.settings is None]: + _settings = providers.MODELS["eshipper_xml"]( + pk=_carrier.pk, + username=str(timezone.now()), + password=str(timezone.now()), + ) + _settings.save_base(raw=True, using=db_alias) + + +def reverse_func(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + + dependencies = [ + ( + "providers", + "0077_eshipperxmlsettings_uspsrestinternationalsettings_and_more", + ), + ] + + operations = [ + migrations.RunPython(forwards_func, reverse_func), + ] diff --git a/modules/pricing/karrio/server/pricing/migrations/0056_alter_surcharge_carriers_alter_surcharge_services.py b/modules/pricing/karrio/server/pricing/migrations/0056_alter_surcharge_carriers_alter_surcharge_services.py new file mode 100644 index 0000000000..952833f615 --- /dev/null +++ b/modules/pricing/karrio/server/pricing/migrations/0056_alter_surcharge_carriers_alter_surcharge_services.py @@ -0,0 +1,3899 @@ +# Generated by Django 4.2.14 on 2024-07-30 01:47 + +from django.db import migrations +import karrio.server.core.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ("pricing", "0055_alter_surcharge_services"), + ] + + operations = [ + migrations.AlterField( + model_name="surcharge", + name="carriers", + field=karrio.server.core.fields.MultiChoiceField( + blank=True, + choices=[ + ("allied_express", "allied_express"), + ("allied_express_local", 
"allied_express_local"), + ("amazon_shipping", "amazon_shipping"), + ("aramex", "aramex"), + ("asendia_us", "asendia_us"), + ("australiapost", "australiapost"), + ("boxknight", "boxknight"), + ("bpost", "bpost"), + ("canadapost", "canadapost"), + ("canpar", "canpar"), + ("chronopost", "chronopost"), + ("colissimo", "colissimo"), + ("dhl_express", "dhl_express"), + ("dhl_parcel_de", "dhl_parcel_de"), + ("dhl_poland", "dhl_poland"), + ("dhl_universal", "dhl_universal"), + ("dicom", "dicom"), + ("dpd", "dpd"), + ("dpdhl", "dpdhl"), + ("easypost", "easypost"), + ("eshipper", "eshipper"), + ("eshipper_xml", "eshipper_xml"), + ("fedex", "fedex"), + ("fedex_ws", "fedex_ws"), + ("freightcom", "freightcom"), + ("generic", "generic"), + ("geodis", "geodis"), + ("hay_post", "hay_post"), + ("laposte", "laposte"), + ("locate2u", "locate2u"), + ("nationex", "nationex"), + ("purolator", "purolator"), + ("roadie", "roadie"), + ("royalmail", "royalmail"), + ("sendle", "sendle"), + ("tge", "tge"), + ("tnt", "tnt"), + ("ups", "ups"), + ("usps", "usps"), + ("usps_international", "usps_international"), + ("usps_rest", "usps_rest"), + ("usps_rest_international", "usps_rest_international"), + ("zoom2u", "zoom2u"), + ], + help_text="\n The list of carriers you want to apply the surcharge to.\n
\n Note that by default, the surcharge is applied to all carriers\n ", + null=True, + ), + ), + migrations.AlterField( + model_name="surcharge", + name="services", + field=karrio.server.core.fields.MultiChoiceField( + blank=True, + choices=[ + ("allied_road_service", "allied_road_service"), + ("allied_parcel_service", "allied_parcel_service"), + ( + "allied_standard_pallet_service", + "allied_standard_pallet_service", + ), + ( + "allied_oversized_pallet_service", + "allied_oversized_pallet_service", + ), + ("allied_road_service", "allied_road_service"), + ("allied_parcel_service", "allied_parcel_service"), + ( + "allied_standard_pallet_service", + "allied_standard_pallet_service", + ), + ( + "allied_oversized_pallet_service", + "allied_oversized_pallet_service", + ), + ("allied_local_normal_service", "allied_local_normal_service"), + ("allied_local_vip_service", "allied_local_vip_service"), + ( + "allied_local_executive_service", + "allied_local_executive_service", + ), + ("allied_local_gold_service", "allied_local_gold_service"), + ("amazon_shipping_ground", "amazon_shipping_ground"), + ("amazon_shipping_standard", "amazon_shipping_standard"), + ("amazon_shipping_premium", "amazon_shipping_premium"), + ("asendia_us_e_com_tracked_ddp", "asendia_us_e_com_tracked_ddp"), + ("asendia_us_fully_tracked", "asendia_us_fully_tracked"), + ("asendia_us_country_tracked", "asendia_us_country_tracked"), + ("australiapost_parcel_post", "australiapost_parcel_post"), + ("australiapost_express_post", "australiapost_express_post"), + ( + "australiapost_parcel_post_signature", + "australiapost_parcel_post_signature", + ), + ( + "australiapost_express_post_signature", + "australiapost_express_post_signature", + ), + ( + "australiapost_intl_standard_pack_track", + "australiapost_intl_standard_pack_track", + ), + ( + "australiapost_intl_standard_with_signature", + "australiapost_intl_standard_with_signature", + ), + ( + "australiapost_intl_express_merch", + "australiapost_intl_express_merch", + ), + ( + "australiapost_intl_express_docs", + "australiapost_intl_express_docs", + ), + ( + "australiapost_eparcel_post_returns", + "australiapost_eparcel_post_returns", + ), + ( + "australiapost_express_eparcel_post_returns", + "australiapost_express_eparcel_post_returns", + ), + ("boxknight_sameday", "boxknight_sameday"), + ("boxknight_nextday", "boxknight_nextday"), + ("boxknight_scheduled", "boxknight_scheduled"), + ("bpack_24h_pro", "bpack_24h_pro"), + ("bpack_24h_business", "bpack_24h_business"), + ("bpack_bus", "bpack_bus"), + ("bpack_pallet", "bpack_pallet"), + ("bpack_easy_retour", "bpack_easy_retour"), + ("bpack_xl", "bpack_xl"), + ("bpack_bpost", "bpack_bpost"), + ("bpack_24_7", "bpack_24_7"), + ("bpack_world_business", "bpack_world_business"), + ("bpack_world_express_pro", "bpack_world_express_pro"), + ("bpack_europe_business", "bpack_europe_business"), + ("bpack_world_easy_return", "bpack_world_easy_return"), + ("bpack_bpost_international", "bpack_bpost_international"), + ("bpack_24_7_international", "bpack_24_7_international"), + ("canadapost_regular_parcel", "canadapost_regular_parcel"), + ("canadapost_expedited_parcel", "canadapost_expedited_parcel"), + ("canadapost_xpresspost", "canadapost_xpresspost"), + ( + "canadapost_xpresspost_certified", + "canadapost_xpresspost_certified", + ), + ("canadapost_priority", "canadapost_priority"), + ("canadapost_library_books", "canadapost_library_books"), + ( + "canadapost_expedited_parcel_usa", + "canadapost_expedited_parcel_usa", + ), + ( + 
"canadapost_priority_worldwide_envelope_usa", + "canadapost_priority_worldwide_envelope_usa", + ), + ( + "canadapost_priority_worldwide_pak_usa", + "canadapost_priority_worldwide_pak_usa", + ), + ( + "canadapost_priority_worldwide_parcel_usa", + "canadapost_priority_worldwide_parcel_usa", + ), + ( + "canadapost_small_packet_usa_air", + "canadapost_small_packet_usa_air", + ), + ("canadapost_tracked_packet_usa", "canadapost_tracked_packet_usa"), + ( + "canadapost_tracked_packet_usa_lvm", + "canadapost_tracked_packet_usa_lvm", + ), + ("canadapost_xpresspost_usa", "canadapost_xpresspost_usa"), + ( + "canadapost_xpresspost_international", + "canadapost_xpresspost_international", + ), + ( + "canadapost_international_parcel_air", + "canadapost_international_parcel_air", + ), + ( + "canadapost_international_parcel_surface", + "canadapost_international_parcel_surface", + ), + ( + "canadapost_priority_worldwide_envelope_intl", + "canadapost_priority_worldwide_envelope_intl", + ), + ( + "canadapost_priority_worldwide_pak_intl", + "canadapost_priority_worldwide_pak_intl", + ), + ( + "canadapost_priority_worldwide_parcel_intl", + "canadapost_priority_worldwide_parcel_intl", + ), + ( + "canadapost_small_packet_international_air", + "canadapost_small_packet_international_air", + ), + ( + "canadapost_small_packet_international_surface", + "canadapost_small_packet_international_surface", + ), + ( + "canadapost_tracked_packet_international", + "canadapost_tracked_packet_international", + ), + ("chronopost_retrait_bureau", "chronopost_retrait_bureau"), + ("chronopost_13", "chronopost_13"), + ("chronopost_10", "chronopost_10"), + ("chronopost_18", "chronopost_18"), + ("chronopost_relais", "chronopost_relais"), + ( + "chronopost_express_international", + "chronopost_express_international", + ), + ( + "chronopost_premium_international", + "chronopost_premium_international", + ), + ( + "chronopost_classic_international", + "chronopost_classic_international", + ), + ( + "colissimo_home_without_signature", + "colissimo_home_without_signature", + ), + ("colissimo_home_with_signature", "colissimo_home_with_signature"), + ("colissimo_eco_france", "colissimo_eco_france"), + ("colissimo_return_france", "colissimo_return_france"), + ( + "colissimo_flash_without_signature", + "colissimo_flash_without_signature", + ), + ( + "colissimo_flash_with_signature", + "colissimo_flash_with_signature", + ), + ( + "colissimo_oversea_home_without_signature", + "colissimo_oversea_home_without_signature", + ), + ( + "colissimo_oversea_home_with_signature", + "colissimo_oversea_home_with_signature", + ), + ( + "colissimo_eco_om_without_signature", + "colissimo_eco_om_without_signature", + ), + ( + "colissimo_eco_om_with_signature", + "colissimo_eco_om_with_signature", + ), + ("colissimo_retour_om", "colissimo_retour_om"), + ( + "colissimo_return_international_from_france", + "colissimo_return_international_from_france", + ), + ( + "colissimo_economical_big_export_offer", + "colissimo_economical_big_export_offer", + ), + ( + "colissimo_out_of_home_national_international", + "colissimo_out_of_home_national_international", + ), + ("dhl_logistics_services", "dhl_logistics_services"), + ("dhl_domestic_express_12_00", "dhl_domestic_express_12_00"), + ("dhl_express_choice", "dhl_express_choice"), + ("dhl_express_choice_nondoc", "dhl_express_choice_nondoc"), + ("dhl_jetline", "dhl_jetline"), + ("dhl_sprintline", "dhl_sprintline"), + ("dhl_air_capacity_sales", "dhl_air_capacity_sales"), + ("dhl_express_easy", "dhl_express_easy"), + 
("dhl_express_easy_nondoc", "dhl_express_easy_nondoc"), + ("dhl_parcel_product", "dhl_parcel_product"), + ("dhl_accounting", "dhl_accounting"), + ("dhl_breakbulk_express", "dhl_breakbulk_express"), + ("dhl_medical_express", "dhl_medical_express"), + ("dhl_express_worldwide_doc", "dhl_express_worldwide_doc"), + ("dhl_express_9_00_nondoc", "dhl_express_9_00_nondoc"), + ("dhl_freight_worldwide_nondoc", "dhl_freight_worldwide_nondoc"), + ("dhl_economy_select_domestic", "dhl_economy_select_domestic"), + ("dhl_economy_select_nondoc", "dhl_economy_select_nondoc"), + ("dhl_express_domestic_9_00", "dhl_express_domestic_9_00"), + ("dhl_jumbo_box_nondoc", "dhl_jumbo_box_nondoc"), + ("dhl_express_9_00", "dhl_express_9_00"), + ("dhl_express_10_30", "dhl_express_10_30"), + ("dhl_express_10_30_nondoc", "dhl_express_10_30_nondoc"), + ("dhl_express_domestic", "dhl_express_domestic"), + ("dhl_express_domestic_10_30", "dhl_express_domestic_10_30"), + ("dhl_express_worldwide_nondoc", "dhl_express_worldwide_nondoc"), + ("dhl_medical_express_nondoc", "dhl_medical_express_nondoc"), + ("dhl_globalmail", "dhl_globalmail"), + ("dhl_same_day", "dhl_same_day"), + ("dhl_express_12_00", "dhl_express_12_00"), + ("dhl_express_worldwide", "dhl_express_worldwide"), + ("dhl_parcel_product_nondoc", "dhl_parcel_product_nondoc"), + ("dhl_economy_select", "dhl_economy_select"), + ("dhl_express_envelope", "dhl_express_envelope"), + ("dhl_express_12_00_nondoc", "dhl_express_12_00_nondoc"), + ("dhl_destination_charges", "dhl_destination_charges"), + ("dhl_express_all", "dhl_express_all"), + ("dhl_parcel_de_paket", "dhl_parcel_de_paket"), + ("dhl_parcel_de_warenpost", "dhl_parcel_de_warenpost"), + ("dhl_parcel_de_europaket", "dhl_parcel_de_europaket"), + ( + "dhl_parcel_de_paket_international", + "dhl_parcel_de_paket_international", + ), + ( + "dhl_parcel_de_warenpost_international", + "dhl_parcel_de_warenpost_international", + ), + ("dhl_poland_premium", "dhl_poland_premium"), + ("dhl_poland_polska", "dhl_poland_polska"), + ("dhl_poland_09", "dhl_poland_09"), + ("dhl_poland_12", "dhl_poland_12"), + ("dhl_poland_connect", "dhl_poland_connect"), + ("dhl_poland_international", "dhl_poland_international"), + ("dpd_cl", "dpd_cl"), + ("dpd_express_10h", "dpd_express_10h"), + ("dpd_express_12h", "dpd_express_12h"), + ("dpd_express_18h_guarantee", "dpd_express_18h_guarantee"), + ("dpd_express_b2b_predict", "dpd_express_b2b_predict"), + ("dpdhl_paket", "dpdhl_paket"), + ("dpdhl_paket_international", "dpdhl_paket_international"), + ("dpdhl_europaket", "dpdhl_europaket"), + ("dpdhl_paket_connect", "dpdhl_paket_connect"), + ("dpdhl_warenpost", "dpdhl_warenpost"), + ("dpdhl_warenpost_international", "dpdhl_warenpost_international"), + ("dpdhl_retoure", "dpdhl_retoure"), + ("easypost_amazonmws_ups_rates", "easypost_amazonmws_ups_rates"), + ("easypost_amazonmws_usps_rates", "easypost_amazonmws_usps_rates"), + ( + "easypost_amazonmws_fedex_rates", + "easypost_amazonmws_fedex_rates", + ), + ("easypost_amazonmws_ups_labels", "easypost_amazonmws_ups_labels"), + ( + "easypost_amazonmws_usps_labels", + "easypost_amazonmws_usps_labels", + ), + ( + "easypost_amazonmws_fedex_labels", + "easypost_amazonmws_fedex_labels", + ), + ( + "easypost_amazonmws_ups_tracking", + "easypost_amazonmws_ups_tracking", + ), + ( + "easypost_amazonmws_usps_tracking", + "easypost_amazonmws_usps_tracking", + ), + ( + "easypost_amazonmws_fedex_tracking", + "easypost_amazonmws_fedex_tracking", + ), + ( + "easypost_apc_parcel_connect_book_service", + 
"easypost_apc_parcel_connect_book_service", + ), + ( + "easypost_apc_parcel_connect_expedited_ddp", + "easypost_apc_parcel_connect_expedited_ddp", + ), + ( + "easypost_apc_parcel_connect_expedited_ddu", + "easypost_apc_parcel_connect_expedited_ddu", + ), + ( + "easypost_apc_parcel_connect_priority_ddp", + "easypost_apc_parcel_connect_priority_ddp", + ), + ( + "easypost_apc_parcel_connect_priority_ddp_delcon", + "easypost_apc_parcel_connect_priority_ddp_delcon", + ), + ( + "easypost_apc_parcel_connect_priority_ddu", + "easypost_apc_parcel_connect_priority_ddu", + ), + ( + "easypost_apc_parcel_connect_priority_ddu_delcon", + "easypost_apc_parcel_connect_priority_ddu_delcon", + ), + ( + "easypost_apc_parcel_connect_priority_ddupqw", + "easypost_apc_parcel_connect_priority_ddupqw", + ), + ( + "easypost_apc_parcel_connect_standard_ddu", + "easypost_apc_parcel_connect_standard_ddu", + ), + ( + "easypost_apc_parcel_connect_standard_ddupqw", + "easypost_apc_parcel_connect_standard_ddupqw", + ), + ( + "easypost_apc_parcel_connect_packet_ddu", + "easypost_apc_parcel_connect_packet_ddu", + ), + ("easypost_asendia_pmi", "easypost_asendia_pmi"), + ("easypost_asendia_e_packet", "easypost_asendia_e_packet"), + ("easypost_asendia_ipa", "easypost_asendia_ipa"), + ("easypost_asendia_isal", "easypost_asendia_isal"), + ("easypost_asendia_us_ads", "easypost_asendia_us_ads"), + ( + "easypost_asendia_us_air_freight_inbound", + "easypost_asendia_us_air_freight_inbound", + ), + ( + "easypost_asendia_us_air_freight_outbound", + "easypost_asendia_us_air_freight_outbound", + ), + ( + "easypost_asendia_us_domestic_bound_printer_matter_expedited", + "easypost_asendia_us_domestic_bound_printer_matter_expedited", + ), + ( + "easypost_asendia_us_domestic_bound_printer_matter_ground", + "easypost_asendia_us_domestic_bound_printer_matter_ground", + ), + ( + "easypost_asendia_us_domestic_flats_expedited", + "easypost_asendia_us_domestic_flats_expedited", + ), + ( + "easypost_asendia_us_domestic_flats_ground", + "easypost_asendia_us_domestic_flats_ground", + ), + ( + "easypost_asendia_us_domestic_parcel_ground_over1lb", + "easypost_asendia_us_domestic_parcel_ground_over1lb", + ), + ( + "easypost_asendia_us_domestic_parcel_ground_under1lb", + "easypost_asendia_us_domestic_parcel_ground_under1lb", + ), + ( + "easypost_asendia_us_domestic_parcel_max_over1lb", + "easypost_asendia_us_domestic_parcel_max_over1lb", + ), + ( + "easypost_asendia_us_domestic_parcel_max_under1lb", + "easypost_asendia_us_domestic_parcel_max_under1lb", + ), + ( + "easypost_asendia_us_domestic_parcel_over1lb_expedited", + "easypost_asendia_us_domestic_parcel_over1lb_expedited", + ), + ( + "easypost_asendia_us_domestic_parcel_under1lb_expedited", + "easypost_asendia_us_domestic_parcel_under1lb_expedited", + ), + ( + "easypost_asendia_us_domestic_promo_parcel_expedited", + "easypost_asendia_us_domestic_promo_parcel_expedited", + ), + ( + "easypost_asendia_us_domestic_promo_parcel_ground", + "easypost_asendia_us_domestic_promo_parcel_ground", + ), + ( + "easypost_asendia_us_bulk_freight", + "easypost_asendia_us_bulk_freight", + ), + ( + "easypost_asendia_us_business_mail_canada_lettermail", + "easypost_asendia_us_business_mail_canada_lettermail", + ), + ( + "easypost_asendia_us_business_mail_canada_lettermail_machineable", + "easypost_asendia_us_business_mail_canada_lettermail_machineable", + ), + ( + "easypost_asendia_us_business_mail_economy", + "easypost_asendia_us_business_mail_economy", + ), + ( + "easypost_asendia_us_business_mail_economy_lp_wholesale", 
+ "easypost_asendia_us_business_mail_economy_lp_wholesale", + ), + ( + "easypost_asendia_us_business_mail_economy_sp_wholesale", + "easypost_asendia_us_business_mail_economy_sp_wholesale", + ), + ( + "easypost_asendia_us_business_mail_ipa", + "easypost_asendia_us_business_mail_ipa", + ), + ( + "easypost_asendia_us_business_mail_isal", + "easypost_asendia_us_business_mail_isal", + ), + ( + "easypost_asendia_us_business_mail_priority", + "easypost_asendia_us_business_mail_priority", + ), + ( + "easypost_asendia_us_business_mail_priority_lp_wholesale", + "easypost_asendia_us_business_mail_priority_lp_wholesale", + ), + ( + "easypost_asendia_us_business_mail_priority_sp_wholesale", + "easypost_asendia_us_business_mail_priority_sp_wholesale", + ), + ( + "easypost_asendia_us_marketing_mail_canada_personalized_lcp", + "easypost_asendia_us_marketing_mail_canada_personalized_lcp", + ), + ( + "easypost_asendia_us_marketing_mail_canada_personalized_machineable", + "easypost_asendia_us_marketing_mail_canada_personalized_machineable", + ), + ( + "easypost_asendia_us_marketing_mail_canada_personalized_ndg", + "easypost_asendia_us_marketing_mail_canada_personalized_ndg", + ), + ( + "easypost_asendia_us_marketing_mail_economy", + "easypost_asendia_us_marketing_mail_economy", + ), + ( + "easypost_asendia_us_marketing_mail_ipa", + "easypost_asendia_us_marketing_mail_ipa", + ), + ( + "easypost_asendia_us_marketing_mail_isal", + "easypost_asendia_us_marketing_mail_isal", + ), + ( + "easypost_asendia_us_marketing_mail_priority", + "easypost_asendia_us_marketing_mail_priority", + ), + ( + "easypost_asendia_us_publications_canada_lcp", + "easypost_asendia_us_publications_canada_lcp", + ), + ( + "easypost_asendia_us_publications_canada_ndg", + "easypost_asendia_us_publications_canada_ndg", + ), + ( + "easypost_asendia_us_publications_economy", + "easypost_asendia_us_publications_economy", + ), + ( + "easypost_asendia_us_publications_ipa", + "easypost_asendia_us_publications_ipa", + ), + ( + "easypost_asendia_us_publications_isal", + "easypost_asendia_us_publications_isal", + ), + ( + "easypost_asendia_us_publications_priority", + "easypost_asendia_us_publications_priority", + ), + ( + "easypost_asendia_us_epaq_elite", + "easypost_asendia_us_epaq_elite", + ), + ( + "easypost_asendia_us_epaq_elite_custom", + "easypost_asendia_us_epaq_elite_custom", + ), + ( + "easypost_asendia_us_epaq_elite_dap", + "easypost_asendia_us_epaq_elite_dap", + ), + ( + "easypost_asendia_us_epaq_elite_ddp", + "easypost_asendia_us_epaq_elite_ddp", + ), + ( + "easypost_asendia_us_epaq_elite_ddp_oversized", + "easypost_asendia_us_epaq_elite_ddp_oversized", + ), + ( + "easypost_asendia_us_epaq_elite_dpd", + "easypost_asendia_us_epaq_elite_dpd", + ), + ( + "easypost_asendia_us_epaq_elite_direct_access_canada_ddp", + "easypost_asendia_us_epaq_elite_direct_access_canada_ddp", + ), + ( + "easypost_asendia_us_epaq_elite_oversized", + "easypost_asendia_us_epaq_elite_oversized", + ), + ("easypost_asendia_us_epaq_plus", "easypost_asendia_us_epaq_plus"), + ( + "easypost_asendia_us_epaq_plus_custom", + "easypost_asendia_us_epaq_plus_custom", + ), + ( + "easypost_asendia_us_epaq_plus_customs_prepaid", + "easypost_asendia_us_epaq_plus_customs_prepaid", + ), + ( + "easypost_asendia_us_epaq_plus_dap", + "easypost_asendia_us_epaq_plus_dap", + ), + ( + "easypost_asendia_us_epaq_plus_ddp", + "easypost_asendia_us_epaq_plus_ddp", + ), + ( + "easypost_asendia_us_epaq_plus_economy", + "easypost_asendia_us_epaq_plus_economy", + ), + ( + 
"easypost_asendia_us_epaq_plus_wholesale", + "easypost_asendia_us_epaq_plus_wholesale", + ), + ( + "easypost_asendia_us_epaq_pluse_packet", + "easypost_asendia_us_epaq_pluse_packet", + ), + ( + "easypost_asendia_us_epaq_pluse_packet_canada_customs_pre_paid", + "easypost_asendia_us_epaq_pluse_packet_canada_customs_pre_paid", + ), + ( + "easypost_asendia_us_epaq_pluse_packet_canada_ddp", + "easypost_asendia_us_epaq_pluse_packet_canada_ddp", + ), + ( + "easypost_asendia_us_epaq_returns_domestic", + "easypost_asendia_us_epaq_returns_domestic", + ), + ( + "easypost_asendia_us_epaq_returns_international", + "easypost_asendia_us_epaq_returns_international", + ), + ( + "easypost_asendia_us_epaq_select", + "easypost_asendia_us_epaq_select", + ), + ( + "easypost_asendia_us_epaq_select_custom", + "easypost_asendia_us_epaq_select_custom", + ), + ( + "easypost_asendia_us_epaq_select_customs_prepaid_by_shopper", + "easypost_asendia_us_epaq_select_customs_prepaid_by_shopper", + ), + ( + "easypost_asendia_us_epaq_select_dap", + "easypost_asendia_us_epaq_select_dap", + ), + ( + "easypost_asendia_us_epaq_select_ddp", + "easypost_asendia_us_epaq_select_ddp", + ), + ( + "easypost_asendia_us_epaq_select_ddp_direct_access", + "easypost_asendia_us_epaq_select_ddp_direct_access", + ), + ( + "easypost_asendia_us_epaq_select_direct_access", + "easypost_asendia_us_epaq_select_direct_access", + ), + ( + "easypost_asendia_us_epaq_select_direct_access_canada_ddp", + "easypost_asendia_us_epaq_select_direct_access_canada_ddp", + ), + ( + "easypost_asendia_us_epaq_select_economy", + "easypost_asendia_us_epaq_select_economy", + ), + ( + "easypost_asendia_us_epaq_select_oversized", + "easypost_asendia_us_epaq_select_oversized", + ), + ( + "easypost_asendia_us_epaq_select_oversized_ddp", + "easypost_asendia_us_epaq_select_oversized_ddp", + ), + ( + "easypost_asendia_us_epaq_select_pmei", + "easypost_asendia_us_epaq_select_pmei", + ), + ( + "easypost_asendia_us_epaq_select_pmei_canada_customs_pre_paid", + "easypost_asendia_us_epaq_select_pmei_canada_customs_pre_paid", + ), + ( + "easypost_asendia_us_epaq_select_pmeipc_postage", + "easypost_asendia_us_epaq_select_pmeipc_postage", + ), + ( + "easypost_asendia_us_epaq_select_pmi", + "easypost_asendia_us_epaq_select_pmi", + ), + ( + "easypost_asendia_us_epaq_select_pmi_canada_customs_prepaid", + "easypost_asendia_us_epaq_select_pmi_canada_customs_prepaid", + ), + ( + "easypost_asendia_us_epaq_select_pmi_canada_ddp", + "easypost_asendia_us_epaq_select_pmi_canada_ddp", + ), + ( + "easypost_asendia_us_epaq_select_pmi_non_presort", + "easypost_asendia_us_epaq_select_pmi_non_presort", + ), + ( + "easypost_asendia_us_epaq_select_pmipc_postage", + "easypost_asendia_us_epaq_select_pmipc_postage", + ), + ( + "easypost_asendia_us_epaq_standard", + "easypost_asendia_us_epaq_standard", + ), + ( + "easypost_asendia_us_epaq_standard_custom", + "easypost_asendia_us_epaq_standard_custom", + ), + ( + "easypost_asendia_us_epaq_standard_economy", + "easypost_asendia_us_epaq_standard_economy", + ), + ( + "easypost_asendia_us_epaq_standard_ipa", + "easypost_asendia_us_epaq_standard_ipa", + ), + ( + "easypost_asendia_us_epaq_standard_isal", + "easypost_asendia_us_epaq_standard_isal", + ), + ( + "easypost_asendia_us_epaq_select_pmei_non_presort", + "easypost_asendia_us_epaq_select_pmei_non_presort", + ), + ( + "easypost_australiapost_express_post", + "easypost_australiapost_express_post", + ), + ( + "easypost_australiapost_express_post_signature", + "easypost_australiapost_express_post_signature", + ), 
+ ( + "easypost_australiapost_parcel_post", + "easypost_australiapost_parcel_post", + ), + ( + "easypost_australiapost_parcel_post_signature", + "easypost_australiapost_parcel_post_signature", + ), + ( + "easypost_australiapost_parcel_post_extra", + "easypost_australiapost_parcel_post_extra", + ), + ( + "easypost_australiapost_parcel_post_wine_plus_signature", + "easypost_australiapost_parcel_post_wine_plus_signature", + ), + ("easypost_axlehire_delivery", "easypost_axlehire_delivery"), + ( + "easypost_better_trucks_next_day", + "easypost_better_trucks_next_day", + ), + ("easypost_bond_standard", "easypost_bond_standard"), + ( + "easypost_canadapost_regular_parcel", + "easypost_canadapost_regular_parcel", + ), + ( + "easypost_canadapost_expedited_parcel", + "easypost_canadapost_expedited_parcel", + ), + ( + "easypost_canadapost_xpresspost", + "easypost_canadapost_xpresspost", + ), + ( + "easypost_canadapost_xpresspost_certified", + "easypost_canadapost_xpresspost_certified", + ), + ("easypost_canadapost_priority", "easypost_canadapost_priority"), + ( + "easypost_canadapost_library_books", + "easypost_canadapost_library_books", + ), + ( + "easypost_canadapost_expedited_parcel_usa", + "easypost_canadapost_expedited_parcel_usa", + ), + ( + "easypost_canadapost_priority_worldwide_envelope_usa", + "easypost_canadapost_priority_worldwide_envelope_usa", + ), + ( + "easypost_canadapost_priority_worldwide_pak_usa", + "easypost_canadapost_priority_worldwide_pak_usa", + ), + ( + "easypost_canadapost_priority_worldwide_parcel_usa", + "easypost_canadapost_priority_worldwide_parcel_usa", + ), + ( + "easypost_canadapost_small_packet_usa_air", + "easypost_canadapost_small_packet_usa_air", + ), + ( + "easypost_canadapost_tracked_packet_usa", + "easypost_canadapost_tracked_packet_usa", + ), + ( + "easypost_canadapost_tracked_packet_usalvm", + "easypost_canadapost_tracked_packet_usalvm", + ), + ( + "easypost_canadapost_xpresspost_usa", + "easypost_canadapost_xpresspost_usa", + ), + ( + "easypost_canadapost_xpresspost_international", + "easypost_canadapost_xpresspost_international", + ), + ( + "easypost_canadapost_international_parcel_air", + "easypost_canadapost_international_parcel_air", + ), + ( + "easypost_canadapost_international_parcel_surface", + "easypost_canadapost_international_parcel_surface", + ), + ( + "easypost_canadapost_priority_worldwide_envelope_intl", + "easypost_canadapost_priority_worldwide_envelope_intl", + ), + ( + "easypost_canadapost_priority_worldwide_pak_intl", + "easypost_canadapost_priority_worldwide_pak_intl", + ), + ( + "easypost_canadapost_priority_worldwide_parcel_intl", + "easypost_canadapost_priority_worldwide_parcel_intl", + ), + ( + "easypost_canadapost_small_packet_international_air", + "easypost_canadapost_small_packet_international_air", + ), + ( + "easypost_canadapost_small_packet_international_surface", + "easypost_canadapost_small_packet_international_surface", + ), + ( + "easypost_canadapost_tracked_packet_international", + "easypost_canadapost_tracked_packet_international", + ), + ("easypost_canpar_ground", "easypost_canpar_ground"), + ("easypost_canpar_select_letter", "easypost_canpar_select_letter"), + ("easypost_canpar_select_pak", "easypost_canpar_select_pak"), + ("easypost_canpar_select", "easypost_canpar_select"), + ( + "easypost_canpar_overnight_letter", + "easypost_canpar_overnight_letter", + ), + ("easypost_canpar_overnight_pak", "easypost_canpar_overnight_pak"), + ("easypost_canpar_overnight", "easypost_canpar_overnight"), + ("easypost_canpar_select_usa", 
"easypost_canpar_select_usa"), + ("easypost_canpar_usa_pak", "easypost_canpar_usa_pak"), + ("easypost_canpar_usa_letter", "easypost_canpar_usa_letter"), + ("easypost_canpar_usa", "easypost_canpar_usa"), + ("easypost_canpar_international", "easypost_canpar_international"), + ("easypost_cdl_distribution", "easypost_cdl_distribution"), + ("easypost_cdl_same_day", "easypost_cdl_same_day"), + ( + "easypost_courier_express_basic_parcel", + "easypost_courier_express_basic_parcel", + ), + ( + "easypost_couriersplease_domestic_priority_signature", + "easypost_couriersplease_domestic_priority_signature", + ), + ( + "easypost_couriersplease_domestic_priority", + "easypost_couriersplease_domestic_priority", + ), + ( + "easypost_couriersplease_domestic_off_peak_signature", + "easypost_couriersplease_domestic_off_peak_signature", + ), + ( + "easypost_couriersplease_domestic_off_peak", + "easypost_couriersplease_domestic_off_peak", + ), + ( + "easypost_couriersplease_gold_domestic_signature", + "easypost_couriersplease_gold_domestic_signature", + ), + ( + "easypost_couriersplease_gold_domestic", + "easypost_couriersplease_gold_domestic", + ), + ( + "easypost_couriersplease_australian_city_express_signature", + "easypost_couriersplease_australian_city_express_signature", + ), + ( + "easypost_couriersplease_australian_city_express", + "easypost_couriersplease_australian_city_express", + ), + ( + "easypost_couriersplease_domestic_saver_signature", + "easypost_couriersplease_domestic_saver_signature", + ), + ( + "easypost_couriersplease_domestic_saver", + "easypost_couriersplease_domestic_saver", + ), + ( + "easypost_couriersplease_road_express", + "easypost_couriersplease_road_express", + ), + ( + "easypost_couriersplease_5_kg_satchel", + "easypost_couriersplease_5_kg_satchel", + ), + ( + "easypost_couriersplease_3_kg_satchel", + "easypost_couriersplease_3_kg_satchel", + ), + ( + "easypost_couriersplease_1_kg_satchel", + "easypost_couriersplease_1_kg_satchel", + ), + ( + "easypost_couriersplease_5_kg_satchel_atl", + "easypost_couriersplease_5_kg_satchel_atl", + ), + ( + "easypost_couriersplease_3_kg_satchel_atl", + "easypost_couriersplease_3_kg_satchel_atl", + ), + ( + "easypost_couriersplease_1_kg_satchel_atl", + "easypost_couriersplease_1_kg_satchel_atl", + ), + ( + "easypost_couriersplease_500_gram_satchel", + "easypost_couriersplease_500_gram_satchel", + ), + ( + "easypost_couriersplease_500_gram_satchel_atl", + "easypost_couriersplease_500_gram_satchel_atl", + ), + ( + "easypost_couriersplease_25_kg_parcel", + "easypost_couriersplease_25_kg_parcel", + ), + ( + "easypost_couriersplease_10_kg_parcel", + "easypost_couriersplease_10_kg_parcel", + ), + ( + "easypost_couriersplease_5_kg_parcel", + "easypost_couriersplease_5_kg_parcel", + ), + ( + "easypost_couriersplease_3_kg_parcel", + "easypost_couriersplease_3_kg_parcel", + ), + ( + "easypost_couriersplease_1_kg_parcel", + "easypost_couriersplease_1_kg_parcel", + ), + ( + "easypost_couriersplease_500_gram_parcel", + "easypost_couriersplease_500_gram_parcel", + ), + ( + "easypost_couriersplease_500_gram_parcel_atl", + "easypost_couriersplease_500_gram_parcel_atl", + ), + ( + "easypost_couriersplease_express_international_priority", + "easypost_couriersplease_express_international_priority", + ), + ( + "easypost_couriersplease_international_saver", + "easypost_couriersplease_international_saver", + ), + ( + "easypost_couriersplease_international_express_import", + "easypost_couriersplease_international_express_import", + ), + ( + 
"easypost_couriersplease_domestic_tracked", + "easypost_couriersplease_domestic_tracked", + ), + ( + "easypost_couriersplease_international_economy", + "easypost_couriersplease_international_economy", + ), + ( + "easypost_couriersplease_international_standard", + "easypost_couriersplease_international_standard", + ), + ( + "easypost_couriersplease_international_express", + "easypost_couriersplease_international_express", + ), + ( + "easypost_deutschepost_packet_plus", + "easypost_deutschepost_packet_plus", + ), + ( + "easypost_deutschepost_uk_priority_packet_plus", + "easypost_deutschepost_uk_priority_packet_plus", + ), + ( + "easypost_deutschepost_uk_priority_packet", + "easypost_deutschepost_uk_priority_packet", + ), + ( + "easypost_deutschepost_uk_priority_packet_tracked", + "easypost_deutschepost_uk_priority_packet_tracked", + ), + ( + "easypost_deutschepost_uk_business_mail_registered", + "easypost_deutschepost_uk_business_mail_registered", + ), + ( + "easypost_deutschepost_uk_standard_packet", + "easypost_deutschepost_uk_standard_packet", + ), + ( + "easypost_deutschepost_uk_business_mail_standard", + "easypost_deutschepost_uk_business_mail_standard", + ), + ("easypost_dhl_ecom_asia_packet", "easypost_dhl_ecom_asia_packet"), + ( + "easypost_dhl_ecom_asia_parcel_direct", + "easypost_dhl_ecom_asia_parcel_direct", + ), + ( + "easypost_dhl_ecom_asia_parcel_direct_expedited", + "easypost_dhl_ecom_asia_parcel_direct_expedited", + ), + ( + "easypost_dhl_ecom_parcel_expedited", + "easypost_dhl_ecom_parcel_expedited", + ), + ( + "easypost_dhl_ecom_parcel_expedited_max", + "easypost_dhl_ecom_parcel_expedited_max", + ), + ( + "easypost_dhl_ecom_parcel_ground", + "easypost_dhl_ecom_parcel_ground", + ), + ( + "easypost_dhl_ecom_bpm_expedited", + "easypost_dhl_ecom_bpm_expedited", + ), + ("easypost_dhl_ecom_bpm_ground", "easypost_dhl_ecom_bpm_ground"), + ( + "easypost_dhl_ecom_parcel_international_direct", + "easypost_dhl_ecom_parcel_international_direct", + ), + ( + "easypost_dhl_ecom_parcel_international_standard", + "easypost_dhl_ecom_parcel_international_standard", + ), + ( + "easypost_dhl_ecom_packet_international", + "easypost_dhl_ecom_packet_international", + ), + ( + "easypost_dhl_ecom_parcel_international_direct_priority", + "easypost_dhl_ecom_parcel_international_direct_priority", + ), + ( + "easypost_dhl_ecom_parcel_international_direct_standard", + "easypost_dhl_ecom_parcel_international_direct_standard", + ), + ( + "easypost_dhl_express_break_bulk_economy", + "easypost_dhl_express_break_bulk_economy", + ), + ( + "easypost_dhl_express_break_bulk_express", + "easypost_dhl_express_break_bulk_express", + ), + ( + "easypost_dhl_express_domestic_economy_select", + "easypost_dhl_express_domestic_economy_select", + ), + ( + "easypost_dhl_express_domestic_express", + "easypost_dhl_express_domestic_express", + ), + ( + "easypost_dhl_express_domestic_express1030", + "easypost_dhl_express_domestic_express1030", + ), + ( + "easypost_dhl_express_domestic_express1200", + "easypost_dhl_express_domestic_express1200", + ), + ( + "easypost_dhl_express_economy_select", + "easypost_dhl_express_economy_select", + ), + ( + "easypost_dhl_express_economy_select_non_doc", + "easypost_dhl_express_economy_select_non_doc", + ), + ( + "easypost_dhl_express_euro_pack", + "easypost_dhl_express_euro_pack", + ), + ( + "easypost_dhl_express_europack_non_doc", + "easypost_dhl_express_europack_non_doc", + ), + ( + "easypost_dhl_express_express1030", + "easypost_dhl_express_express1030", + ), + ( + 
"easypost_dhl_express_express1030_non_doc", + "easypost_dhl_express_express1030_non_doc", + ), + ( + "easypost_dhl_express_express1200_non_doc", + "easypost_dhl_express_express1200_non_doc", + ), + ( + "easypost_dhl_express_express1200", + "easypost_dhl_express_express1200", + ), + ( + "easypost_dhl_express_express900", + "easypost_dhl_express_express900", + ), + ( + "easypost_dhl_express_express900_non_doc", + "easypost_dhl_express_express900_non_doc", + ), + ( + "easypost_dhl_express_express_easy", + "easypost_dhl_express_express_easy", + ), + ( + "easypost_dhl_express_express_easy_non_doc", + "easypost_dhl_express_express_easy_non_doc", + ), + ( + "easypost_dhl_express_express_envelope", + "easypost_dhl_express_express_envelope", + ), + ( + "easypost_dhl_express_express_worldwide", + "easypost_dhl_express_express_worldwide", + ), + ( + "easypost_dhl_express_express_worldwide_b2_c", + "easypost_dhl_express_express_worldwide_b2_c", + ), + ( + "easypost_dhl_express_express_worldwide_b2_c_non_doc", + "easypost_dhl_express_express_worldwide_b2_c_non_doc", + ), + ( + "easypost_dhl_express_express_worldwide_ecx", + "easypost_dhl_express_express_worldwide_ecx", + ), + ( + "easypost_dhl_express_express_worldwide_non_doc", + "easypost_dhl_express_express_worldwide_non_doc", + ), + ( + "easypost_dhl_express_freight_worldwide", + "easypost_dhl_express_freight_worldwide", + ), + ( + "easypost_dhl_express_globalmail_business", + "easypost_dhl_express_globalmail_business", + ), + ("easypost_dhl_express_jet_line", "easypost_dhl_express_jet_line"), + ( + "easypost_dhl_express_jumbo_box", + "easypost_dhl_express_jumbo_box", + ), + ( + "easypost_dhl_express_logistics_services", + "easypost_dhl_express_logistics_services", + ), + ("easypost_dhl_express_same_day", "easypost_dhl_express_same_day"), + ( + "easypost_dhl_express_secure_line", + "easypost_dhl_express_secure_line", + ), + ( + "easypost_dhl_express_sprint_line", + "easypost_dhl_express_sprint_line", + ), + ("easypost_dpd_classic", "easypost_dpd_classic"), + ("easypost_dpd_8_30", "easypost_dpd_8_30"), + ("easypost_dpd_10_00", "easypost_dpd_10_00"), + ("easypost_dpd_12_00", "easypost_dpd_12_00"), + ("easypost_dpd_18_00", "easypost_dpd_18_00"), + ("easypost_dpd_express", "easypost_dpd_express"), + ("easypost_dpd_parcelletter", "easypost_dpd_parcelletter"), + ("easypost_dpd_parcelletterplus", "easypost_dpd_parcelletterplus"), + ( + "easypost_dpd_internationalmail", + "easypost_dpd_internationalmail", + ), + ( + "easypost_dpd_uk_air_express_international_air", + "easypost_dpd_uk_air_express_international_air", + ), + ( + "easypost_dpd_uk_air_classic_international_air", + "easypost_dpd_uk_air_classic_international_air", + ), + ("easypost_dpd_uk_parcel_sunday", "easypost_dpd_uk_parcel_sunday"), + ( + "easypost_dpd_uk_freight_parcel_sunday", + "easypost_dpd_uk_freight_parcel_sunday", + ), + ("easypost_dpd_uk_pallet_sunday", "easypost_dpd_uk_pallet_sunday"), + ( + "easypost_dpd_uk_pallet_dpd_classic", + "easypost_dpd_uk_pallet_dpd_classic", + ), + ( + "easypost_dpd_uk_expresspak_dpd_classic", + "easypost_dpd_uk_expresspak_dpd_classic", + ), + ( + "easypost_dpd_uk_expresspak_sunday", + "easypost_dpd_uk_expresspak_sunday", + ), + ( + "easypost_dpd_uk_parcel_dpd_classic", + "easypost_dpd_uk_parcel_dpd_classic", + ), + ( + "easypost_dpd_uk_parcel_dpd_two_day", + "easypost_dpd_uk_parcel_dpd_two_day", + ), + ( + "easypost_dpd_uk_parcel_dpd_next_day", + "easypost_dpd_uk_parcel_dpd_next_day", + ), + ("easypost_dpd_uk_parcel_dpd12", "easypost_dpd_uk_parcel_dpd12"), 
+ ("easypost_dpd_uk_parcel_dpd10", "easypost_dpd_uk_parcel_dpd10"), + ( + "easypost_dpd_uk_parcel_return_to_shop", + "easypost_dpd_uk_parcel_return_to_shop", + ), + ( + "easypost_dpd_uk_parcel_saturday", + "easypost_dpd_uk_parcel_saturday", + ), + ( + "easypost_dpd_uk_parcel_saturday12", + "easypost_dpd_uk_parcel_saturday12", + ), + ( + "easypost_dpd_uk_parcel_saturday10", + "easypost_dpd_uk_parcel_saturday10", + ), + ( + "easypost_dpd_uk_parcel_sunday12", + "easypost_dpd_uk_parcel_sunday12", + ), + ( + "easypost_dpd_uk_freight_parcel_dpd_classic", + "easypost_dpd_uk_freight_parcel_dpd_classic", + ), + ( + "easypost_dpd_uk_freight_parcel_sunday12", + "easypost_dpd_uk_freight_parcel_sunday12", + ), + ( + "easypost_dpd_uk_expresspak_dpd_next_day", + "easypost_dpd_uk_expresspak_dpd_next_day", + ), + ( + "easypost_dpd_uk_expresspak_dpd12", + "easypost_dpd_uk_expresspak_dpd12", + ), + ( + "easypost_dpd_uk_expresspak_dpd10", + "easypost_dpd_uk_expresspak_dpd10", + ), + ( + "easypost_dpd_uk_expresspak_saturday", + "easypost_dpd_uk_expresspak_saturday", + ), + ( + "easypost_dpd_uk_expresspak_saturday12", + "easypost_dpd_uk_expresspak_saturday12", + ), + ( + "easypost_dpd_uk_expresspak_saturday10", + "easypost_dpd_uk_expresspak_saturday10", + ), + ( + "easypost_dpd_uk_expresspak_sunday12", + "easypost_dpd_uk_expresspak_sunday12", + ), + ( + "easypost_dpd_uk_pallet_sunday12", + "easypost_dpd_uk_pallet_sunday12", + ), + ( + "easypost_dpd_uk_pallet_dpd_two_day", + "easypost_dpd_uk_pallet_dpd_two_day", + ), + ( + "easypost_dpd_uk_pallet_dpd_next_day", + "easypost_dpd_uk_pallet_dpd_next_day", + ), + ("easypost_dpd_uk_pallet_dpd12", "easypost_dpd_uk_pallet_dpd12"), + ("easypost_dpd_uk_pallet_dpd10", "easypost_dpd_uk_pallet_dpd10"), + ( + "easypost_dpd_uk_pallet_saturday", + "easypost_dpd_uk_pallet_saturday", + ), + ( + "easypost_dpd_uk_pallet_saturday12", + "easypost_dpd_uk_pallet_saturday12", + ), + ( + "easypost_dpd_uk_pallet_saturday10", + "easypost_dpd_uk_pallet_saturday10", + ), + ( + "easypost_dpd_uk_freight_parcel_dpd_two_day", + "easypost_dpd_uk_freight_parcel_dpd_two_day", + ), + ( + "easypost_dpd_uk_freight_parcel_dpd_next_day", + "easypost_dpd_uk_freight_parcel_dpd_next_day", + ), + ( + "easypost_dpd_uk_freight_parcel_dpd12", + "easypost_dpd_uk_freight_parcel_dpd12", + ), + ( + "easypost_dpd_uk_freight_parcel_dpd10", + "easypost_dpd_uk_freight_parcel_dpd10", + ), + ( + "easypost_dpd_uk_freight_parcel_saturday", + "easypost_dpd_uk_freight_parcel_saturday", + ), + ( + "easypost_dpd_uk_freight_parcel_saturday12", + "easypost_dpd_uk_freight_parcel_saturday12", + ), + ( + "easypost_dpd_uk_freight_parcel_saturday10", + "easypost_dpd_uk_freight_parcel_saturday10", + ), + ( + "easypost_epost_courier_service_ddp", + "easypost_epost_courier_service_ddp", + ), + ( + "easypost_epost_courier_service_ddu", + "easypost_epost_courier_service_ddu", + ), + ( + "easypost_epost_domestic_economy_parcel", + "easypost_epost_domestic_economy_parcel", + ), + ( + "easypost_epost_domestic_parcel_bpm", + "easypost_epost_domestic_parcel_bpm", + ), + ( + "easypost_epost_domestic_priority_parcel", + "easypost_epost_domestic_priority_parcel", + ), + ( + "easypost_epost_domestic_priority_parcel_bpm", + "easypost_epost_domestic_priority_parcel_bpm", + ), + ("easypost_epost_emi_service", "easypost_epost_emi_service"), + ( + "easypost_epost_economy_parcel_service", + "easypost_epost_economy_parcel_service", + ), + ("easypost_epost_ipa_service", "easypost_epost_ipa_service"), + ("easypost_epost_isal_service", 
"easypost_epost_isal_service"), + ("easypost_epost_pmi_service", "easypost_epost_pmi_service"), + ( + "easypost_epost_priority_parcel_ddp", + "easypost_epost_priority_parcel_ddp", + ), + ( + "easypost_epost_priority_parcel_ddu", + "easypost_epost_priority_parcel_ddu", + ), + ( + "easypost_epost_priority_parcel_delivery_confirmation_ddp", + "easypost_epost_priority_parcel_delivery_confirmation_ddp", + ), + ( + "easypost_epost_priority_parcel_delivery_confirmation_ddu", + "easypost_epost_priority_parcel_delivery_confirmation_ddu", + ), + ( + "easypost_epost_epacket_service", + "easypost_epost_epacket_service", + ), + ( + "easypost_estafeta_next_day_by930", + "easypost_estafeta_next_day_by930", + ), + ( + "easypost_estafeta_next_day_by1130", + "easypost_estafeta_next_day_by1130", + ), + ("easypost_estafeta_next_day", "easypost_estafeta_next_day"), + ("easypost_estafeta_two_day", "easypost_estafeta_two_day"), + ("easypost_estafeta_ltl", "easypost_estafeta_ltl"), + ("easypost_fastway_parcel", "easypost_fastway_parcel"), + ("easypost_fastway_satchel", "easypost_fastway_satchel"), + ("easypost_fedex_ground", "easypost_fedex_ground"), + ("easypost_fedex_2_day", "easypost_fedex_2_day"), + ("easypost_fedex_2_day_am", "easypost_fedex_2_day_am"), + ("easypost_fedex_express_saver", "easypost_fedex_express_saver"), + ( + "easypost_fedex_standard_overnight", + "easypost_fedex_standard_overnight", + ), + ( + "easypost_fedex_first_overnight", + "easypost_fedex_first_overnight", + ), + ( + "easypost_fedex_priority_overnight", + "easypost_fedex_priority_overnight", + ), + ( + "easypost_fedex_international_economy", + "easypost_fedex_international_economy", + ), + ( + "easypost_fedex_international_first", + "easypost_fedex_international_first", + ), + ( + "easypost_fedex_international_priority", + "easypost_fedex_international_priority", + ), + ( + "easypost_fedex_ground_home_delivery", + "easypost_fedex_ground_home_delivery", + ), + ( + "easypost_fedex_crossborder_cbec", + "easypost_fedex_crossborder_cbec", + ), + ( + "easypost_fedex_crossborder_cbecl", + "easypost_fedex_crossborder_cbecl", + ), + ( + "easypost_fedex_crossborder_cbecp", + "easypost_fedex_crossborder_cbecp", + ), + ( + "easypost_fedex_sameday_city_economy_service", + "easypost_fedex_sameday_city_economy_service", + ), + ( + "easypost_fedex_sameday_city_standard_service", + "easypost_fedex_sameday_city_standard_service", + ), + ( + "easypost_fedex_sameday_city_priority_service", + "easypost_fedex_sameday_city_priority_service", + ), + ( + "easypost_fedex_sameday_city_last_mile", + "easypost_fedex_sameday_city_last_mile", + ), + ("easypost_fedex_smart_post", "easypost_fedex_smart_post"), + ("easypost_globegistics_pmei", "easypost_globegistics_pmei"), + ( + "easypost_globegistics_ecom_domestic", + "easypost_globegistics_ecom_domestic", + ), + ( + "easypost_globegistics_ecom_europe", + "easypost_globegistics_ecom_europe", + ), + ( + "easypost_globegistics_ecom_express", + "easypost_globegistics_ecom_express", + ), + ( + "easypost_globegistics_ecom_extra", + "easypost_globegistics_ecom_extra", + ), + ( + "easypost_globegistics_ecom_ipa", + "easypost_globegistics_ecom_ipa", + ), + ( + "easypost_globegistics_ecom_isal", + "easypost_globegistics_ecom_isal", + ), + ( + "easypost_globegistics_ecom_pmei_duty_paid", + "easypost_globegistics_ecom_pmei_duty_paid", + ), + ( + "easypost_globegistics_ecom_pmi_duty_paid", + "easypost_globegistics_ecom_pmi_duty_paid", + ), + ( + "easypost_globegistics_ecom_packet", + "easypost_globegistics_ecom_packet", + ), + 
( + "easypost_globegistics_ecom_packet_ddp", + "easypost_globegistics_ecom_packet_ddp", + ), + ( + "easypost_globegistics_ecom_priority", + "easypost_globegistics_ecom_priority", + ), + ( + "easypost_globegistics_ecom_standard", + "easypost_globegistics_ecom_standard", + ), + ( + "easypost_globegistics_ecom_tracked_ddp", + "easypost_globegistics_ecom_tracked_ddp", + ), + ( + "easypost_globegistics_ecom_tracked_ddu", + "easypost_globegistics_ecom_tracked_ddu", + ), + ( + "easypost_gso_early_priority_overnight", + "easypost_gso_early_priority_overnight", + ), + ( + "easypost_gso_priority_overnight", + "easypost_gso_priority_overnight", + ), + ( + "easypost_gso_california_parcel_service", + "easypost_gso_california_parcel_service", + ), + ( + "easypost_gso_saturday_delivery_service", + "easypost_gso_saturday_delivery_service", + ), + ( + "easypost_gso_early_saturday_service", + "easypost_gso_early_saturday_service", + ), + ( + "easypost_hermes_domestic_delivery", + "easypost_hermes_domestic_delivery", + ), + ( + "easypost_hermes_domestic_delivery_signed", + "easypost_hermes_domestic_delivery_signed", + ), + ( + "easypost_hermes_international_delivery", + "easypost_hermes_international_delivery", + ), + ( + "easypost_hermes_international_delivery_signed", + "easypost_hermes_international_delivery_signed", + ), + ( + "easypost_interlink_air_classic_international_air", + "easypost_interlink_air_classic_international_air", + ), + ( + "easypost_interlink_air_express_international_air", + "easypost_interlink_air_express_international_air", + ), + ( + "easypost_interlink_expresspak1_by10_30", + "easypost_interlink_expresspak1_by10_30", + ), + ( + "easypost_interlink_expresspak1_by12", + "easypost_interlink_expresspak1_by12", + ), + ( + "easypost_interlink_expresspak1_next_day", + "easypost_interlink_expresspak1_next_day", + ), + ( + "easypost_interlink_expresspak1_saturday", + "easypost_interlink_expresspak1_saturday", + ), + ( + "easypost_interlink_expresspak1_saturday_by10_30", + "easypost_interlink_expresspak1_saturday_by10_30", + ), + ( + "easypost_interlink_expresspak1_saturday_by12", + "easypost_interlink_expresspak1_saturday_by12", + ), + ( + "easypost_interlink_expresspak1_sunday", + "easypost_interlink_expresspak1_sunday", + ), + ( + "easypost_interlink_expresspak1_sunday_by12", + "easypost_interlink_expresspak1_sunday_by12", + ), + ( + "easypost_interlink_expresspak5_by10", + "easypost_interlink_expresspak5_by10", + ), + ( + "easypost_interlink_expresspak5_by10_30", + "easypost_interlink_expresspak5_by10_30", + ), + ( + "easypost_interlink_expresspak5_by12", + "easypost_interlink_expresspak5_by12", + ), + ( + "easypost_interlink_expresspak5_next_day", + "easypost_interlink_expresspak5_next_day", + ), + ( + "easypost_interlink_expresspak5_saturday", + "easypost_interlink_expresspak5_saturday", + ), + ( + "easypost_interlink_expresspak5_saturday_by10", + "easypost_interlink_expresspak5_saturday_by10", + ), + ( + "easypost_interlink_expresspak5_saturday_by10_30", + "easypost_interlink_expresspak5_saturday_by10_30", + ), + ( + "easypost_interlink_expresspak5_saturday_by12", + "easypost_interlink_expresspak5_saturday_by12", + ), + ( + "easypost_interlink_expresspak5_sunday", + "easypost_interlink_expresspak5_sunday", + ), + ( + "easypost_interlink_expresspak5_sunday_by12", + "easypost_interlink_expresspak5_sunday_by12", + ), + ( + "easypost_interlink_freight_by10", + "easypost_interlink_freight_by10", + ), + ( + "easypost_interlink_freight_by12", + "easypost_interlink_freight_by12", + ), + ( + 
"easypost_interlink_freight_next_day", + "easypost_interlink_freight_next_day", + ), + ( + "easypost_interlink_freight_saturday", + "easypost_interlink_freight_saturday", + ), + ( + "easypost_interlink_freight_saturday_by10", + "easypost_interlink_freight_saturday_by10", + ), + ( + "easypost_interlink_freight_saturday_by12", + "easypost_interlink_freight_saturday_by12", + ), + ( + "easypost_interlink_freight_sunday", + "easypost_interlink_freight_sunday", + ), + ( + "easypost_interlink_freight_sunday_by12", + "easypost_interlink_freight_sunday_by12", + ), + ( + "easypost_interlink_parcel_by10", + "easypost_interlink_parcel_by10", + ), + ( + "easypost_interlink_parcel_by10_30", + "easypost_interlink_parcel_by10_30", + ), + ( + "easypost_interlink_parcel_by12", + "easypost_interlink_parcel_by12", + ), + ( + "easypost_interlink_parcel_dpd_europe_by_road", + "easypost_interlink_parcel_dpd_europe_by_road", + ), + ( + "easypost_interlink_parcel_next_day", + "easypost_interlink_parcel_next_day", + ), + ( + "easypost_interlink_parcel_return", + "easypost_interlink_parcel_return", + ), + ( + "easypost_interlink_parcel_return_to_shop", + "easypost_interlink_parcel_return_to_shop", + ), + ( + "easypost_interlink_parcel_saturday", + "easypost_interlink_parcel_saturday", + ), + ( + "easypost_interlink_parcel_saturday_by10", + "easypost_interlink_parcel_saturday_by10", + ), + ( + "easypost_interlink_parcel_saturday_by10_30", + "easypost_interlink_parcel_saturday_by10_30", + ), + ( + "easypost_interlink_parcel_saturday_by12", + "easypost_interlink_parcel_saturday_by12", + ), + ( + "easypost_interlink_parcel_ship_to_shop", + "easypost_interlink_parcel_ship_to_shop", + ), + ( + "easypost_interlink_parcel_sunday", + "easypost_interlink_parcel_sunday", + ), + ( + "easypost_interlink_parcel_sunday_by12", + "easypost_interlink_parcel_sunday_by12", + ), + ( + "easypost_interlink_parcel_two_day", + "easypost_interlink_parcel_two_day", + ), + ( + "easypost_interlink_pickup_parcel_dpd_europe_by_road", + "easypost_interlink_pickup_parcel_dpd_europe_by_road", + ), + ("easypost_lasership_weekend", "easypost_lasership_weekend"), + ("easypost_loomis_ground", "easypost_loomis_ground"), + ("easypost_loomis_express1800", "easypost_loomis_express1800"), + ("easypost_loomis_express1200", "easypost_loomis_express1200"), + ("easypost_loomis_express900", "easypost_loomis_express900"), + ("easypost_lso_ground_early", "easypost_lso_ground_early"), + ("easypost_lso_ground_basic", "easypost_lso_ground_basic"), + ("easypost_lso_priority_basic", "easypost_lso_priority_basic"), + ("easypost_lso_priority_early", "easypost_lso_priority_early"), + ( + "easypost_lso_priority_saturday", + "easypost_lso_priority_saturday", + ), + ("easypost_lso_priority2nd_day", "easypost_lso_priority2nd_day"), + ( + "easypost_newgistics_parcel_select", + "easypost_newgistics_parcel_select", + ), + ( + "easypost_newgistics_parcel_select_lightweight", + "easypost_newgistics_parcel_select_lightweight", + ), + ("easypost_newgistics_express", "easypost_newgistics_express"), + ( + "easypost_newgistics_first_class_mail", + "easypost_newgistics_first_class_mail", + ), + ( + "easypost_newgistics_priority_mail", + "easypost_newgistics_priority_mail", + ), + ( + "easypost_newgistics_bound_printed_matter", + "easypost_newgistics_bound_printed_matter", + ), + ("easypost_ontrac_sunrise", "easypost_ontrac_sunrise"), + ("easypost_ontrac_gold", "easypost_ontrac_gold"), + ( + "easypost_ontrac_on_trac_ground", + "easypost_ontrac_on_trac_ground", + ), + ( + 
"easypost_ontrac_palletized_freight", + "easypost_ontrac_palletized_freight", + ), + ("easypost_osm_first", "easypost_osm_first"), + ("easypost_osm_expedited", "easypost_osm_expedited"), + ("easypost_osm_bpm", "easypost_osm_bpm"), + ("easypost_osm_media_mail", "easypost_osm_media_mail"), + ("easypost_osm_marketing_parcel", "easypost_osm_marketing_parcel"), + ( + "easypost_osm_marketing_parcel_tracked", + "easypost_osm_marketing_parcel_tracked", + ), + ("easypost_parcll_economy_west", "easypost_parcll_economy_west"), + ("easypost_parcll_economy_east", "easypost_parcll_economy_east"), + ( + "easypost_parcll_economy_central", + "easypost_parcll_economy_central", + ), + ( + "easypost_parcll_economy_northeast", + "easypost_parcll_economy_northeast", + ), + ("easypost_parcll_economy_south", "easypost_parcll_economy_south"), + ( + "easypost_parcll_expedited_west", + "easypost_parcll_expedited_west", + ), + ( + "easypost_parcll_expedited_northeast", + "easypost_parcll_expedited_northeast", + ), + ("easypost_parcll_regional_west", "easypost_parcll_regional_west"), + ("easypost_parcll_regional_east", "easypost_parcll_regional_east"), + ( + "easypost_parcll_regional_central", + "easypost_parcll_regional_central", + ), + ( + "easypost_parcll_regional_northeast", + "easypost_parcll_regional_northeast", + ), + ( + "easypost_parcll_regional_south", + "easypost_parcll_regional_south", + ), + ( + "easypost_parcll_us_to_canada_economy_west", + "easypost_parcll_us_to_canada_economy_west", + ), + ( + "easypost_parcll_us_to_canada_economy_central", + "easypost_parcll_us_to_canada_economy_central", + ), + ( + "easypost_parcll_us_to_canada_economy_northeast", + "easypost_parcll_us_to_canada_economy_northeast", + ), + ( + "easypost_parcll_us_to_europe_economy_west", + "easypost_parcll_us_to_europe_economy_west", + ), + ( + "easypost_parcll_us_to_europe_economy_northeast", + "easypost_parcll_us_to_europe_economy_northeast", + ), + ("easypost_purolator_express", "easypost_purolator_express"), + ( + "easypost_purolator_express12_pm", + "easypost_purolator_express12_pm", + ), + ( + "easypost_purolator_express_pack12_pm", + "easypost_purolator_express_pack12_pm", + ), + ( + "easypost_purolator_express_box12_pm", + "easypost_purolator_express_box12_pm", + ), + ( + "easypost_purolator_express_envelope12_pm", + "easypost_purolator_express_envelope12_pm", + ), + ( + "easypost_purolator_express1030_am", + "easypost_purolator_express1030_am", + ), + ( + "easypost_purolator_express9_am", + "easypost_purolator_express9_am", + ), + ( + "easypost_purolator_express_box", + "easypost_purolator_express_box", + ), + ( + "easypost_purolator_express_box1030_am", + "easypost_purolator_express_box1030_am", + ), + ( + "easypost_purolator_express_box9_am", + "easypost_purolator_express_box9_am", + ), + ( + "easypost_purolator_express_box_evening", + "easypost_purolator_express_box_evening", + ), + ( + "easypost_purolator_express_box_international", + "easypost_purolator_express_box_international", + ), + ( + "easypost_purolator_express_box_international1030_am", + "easypost_purolator_express_box_international1030_am", + ), + ( + "easypost_purolator_express_box_international1200", + "easypost_purolator_express_box_international1200", + ), + ( + "easypost_purolator_express_box_international9_am", + "easypost_purolator_express_box_international9_am", + ), + ( + "easypost_purolator_express_box_us", + "easypost_purolator_express_box_us", + ), + ( + "easypost_purolator_express_box_us1030_am", + "easypost_purolator_express_box_us1030_am", + ), 
+ ( + "easypost_purolator_express_box_us1200", + "easypost_purolator_express_box_us1200", + ), + ( + "easypost_purolator_express_box_us9_am", + "easypost_purolator_express_box_us9_am", + ), + ( + "easypost_purolator_express_envelope", + "easypost_purolator_express_envelope", + ), + ( + "easypost_purolator_express_envelope1030_am", + "easypost_purolator_express_envelope1030_am", + ), + ( + "easypost_purolator_express_envelope9_am", + "easypost_purolator_express_envelope9_am", + ), + ( + "easypost_purolator_express_envelope_evening", + "easypost_purolator_express_envelope_evening", + ), + ( + "easypost_purolator_express_envelope_international", + "easypost_purolator_express_envelope_international", + ), + ( + "easypost_purolator_express_envelope_international1030_am", + "easypost_purolator_express_envelope_international1030_am", + ), + ( + "easypost_purolator_express_envelope_international1200", + "easypost_purolator_express_envelope_international1200", + ), + ( + "easypost_purolator_express_envelope_international9_am", + "easypost_purolator_express_envelope_international9_am", + ), + ( + "easypost_purolator_express_envelope_us", + "easypost_purolator_express_envelope_us", + ), + ( + "easypost_purolator_express_envelope_us1030_am", + "easypost_purolator_express_envelope_us1030_am", + ), + ( + "easypost_purolator_express_envelope_us1200", + "easypost_purolator_express_envelope_us1200", + ), + ( + "easypost_purolator_express_envelope_us9_am", + "easypost_purolator_express_envelope_us9_am", + ), + ( + "easypost_purolator_express_evening", + "easypost_purolator_express_evening", + ), + ( + "easypost_purolator_express_international", + "easypost_purolator_express_international", + ), + ( + "easypost_purolator_express_international1030_am", + "easypost_purolator_express_international1030_am", + ), + ( + "easypost_purolator_express_international1200", + "easypost_purolator_express_international1200", + ), + ( + "easypost_purolator_express_international9_am", + "easypost_purolator_express_international9_am", + ), + ( + "easypost_purolator_express_pack", + "easypost_purolator_express_pack", + ), + ( + "easypost_purolator_express_pack1030_am", + "easypost_purolator_express_pack1030_am", + ), + ( + "easypost_purolator_express_pack9_am", + "easypost_purolator_express_pack9_am", + ), + ( + "easypost_purolator_express_pack_evening", + "easypost_purolator_express_pack_evening", + ), + ( + "easypost_purolator_express_pack_international", + "easypost_purolator_express_pack_international", + ), + ( + "easypost_purolator_express_pack_international1030_am", + "easypost_purolator_express_pack_international1030_am", + ), + ( + "easypost_purolator_express_pack_international1200", + "easypost_purolator_express_pack_international1200", + ), + ( + "easypost_purolator_express_pack_international9_am", + "easypost_purolator_express_pack_international9_am", + ), + ( + "easypost_purolator_express_pack_us", + "easypost_purolator_express_pack_us", + ), + ( + "easypost_purolator_express_pack_us1030_am", + "easypost_purolator_express_pack_us1030_am", + ), + ( + "easypost_purolator_express_pack_us1200", + "easypost_purolator_express_pack_us1200", + ), + ( + "easypost_purolator_express_pack_us9_am", + "easypost_purolator_express_pack_us9_am", + ), + ("easypost_purolator_express_us", "easypost_purolator_express_us"), + ( + "easypost_purolator_express_us1030_am", + "easypost_purolator_express_us1030_am", + ), + ( + "easypost_purolator_express_us1200", + "easypost_purolator_express_us1200", + ), + ( + 
"easypost_purolator_express_us9_am", + "easypost_purolator_express_us9_am", + ), + ("easypost_purolator_ground", "easypost_purolator_ground"), + ( + "easypost_purolator_ground1030_am", + "easypost_purolator_ground1030_am", + ), + ("easypost_purolator_ground9_am", "easypost_purolator_ground9_am"), + ( + "easypost_purolator_ground_distribution", + "easypost_purolator_ground_distribution", + ), + ( + "easypost_purolator_ground_evening", + "easypost_purolator_ground_evening", + ), + ( + "easypost_purolator_ground_regional", + "easypost_purolator_ground_regional", + ), + ("easypost_purolator_ground_us", "easypost_purolator_ground_us"), + ( + "easypost_royalmail_international_signed", + "easypost_royalmail_international_signed", + ), + ( + "easypost_royalmail_international_tracked", + "easypost_royalmail_international_tracked", + ), + ( + "easypost_royalmail_international_tracked_and_signed", + "easypost_royalmail_international_tracked_and_signed", + ), + ("easypost_royalmail_1st_class", "easypost_royalmail_1st_class"), + ( + "easypost_royalmail_1st_class_signed_for", + "easypost_royalmail_1st_class_signed_for", + ), + ("easypost_royalmail_2nd_class", "easypost_royalmail_2nd_class"), + ( + "easypost_royalmail_2nd_class_signed_for", + "easypost_royalmail_2nd_class_signed_for", + ), + ( + "easypost_royalmail_royal_mail24", + "easypost_royalmail_royal_mail24", + ), + ( + "easypost_royalmail_royal_mail24_signed_for", + "easypost_royalmail_royal_mail24_signed_for", + ), + ( + "easypost_royalmail_royal_mail48", + "easypost_royalmail_royal_mail48", + ), + ( + "easypost_royalmail_royal_mail48_signed_for", + "easypost_royalmail_royal_mail48_signed_for", + ), + ( + "easypost_royalmail_special_delivery_guaranteed1pm", + "easypost_royalmail_special_delivery_guaranteed1pm", + ), + ( + "easypost_royalmail_special_delivery_guaranteed9am", + "easypost_royalmail_special_delivery_guaranteed9am", + ), + ( + "easypost_royalmail_standard_letter1st_class", + "easypost_royalmail_standard_letter1st_class", + ), + ( + "easypost_royalmail_standard_letter1st_class_signed_for", + "easypost_royalmail_standard_letter1st_class_signed_for", + ), + ( + "easypost_royalmail_standard_letter2nd_class", + "easypost_royalmail_standard_letter2nd_class", + ), + ( + "easypost_royalmail_standard_letter2nd_class_signed_for", + "easypost_royalmail_standard_letter2nd_class_signed_for", + ), + ("easypost_royalmail_tracked24", "easypost_royalmail_tracked24"), + ( + "easypost_royalmail_tracked24_high_volume", + "easypost_royalmail_tracked24_high_volume", + ), + ( + "easypost_royalmail_tracked24_high_volume_signature", + "easypost_royalmail_tracked24_high_volume_signature", + ), + ( + "easypost_royalmail_tracked24_signature", + "easypost_royalmail_tracked24_signature", + ), + ("easypost_royalmail_tracked48", "easypost_royalmail_tracked48"), + ( + "easypost_royalmail_tracked48_high_volume", + "easypost_royalmail_tracked48_high_volume", + ), + ( + "easypost_royalmail_tracked48_high_volume_signature", + "easypost_royalmail_tracked48_high_volume_signature", + ), + ( + "easypost_royalmail_tracked48_signature", + "easypost_royalmail_tracked48_signature", + ), + ( + "easypost_seko_ecommerce_standard_tracked", + "easypost_seko_ecommerce_standard_tracked", + ), + ( + "easypost_seko_ecommerce_express_tracked", + "easypost_seko_ecommerce_express_tracked", + ), + ( + "easypost_seko_domestic_express", + "easypost_seko_domestic_express", + ), + ( + "easypost_seko_domestic_standard", + "easypost_seko_domestic_standard", + ), + ("easypost_sendle_easy", 
"easypost_sendle_easy"), + ("easypost_sendle_pro", "easypost_sendle_pro"), + ("easypost_sendle_plus", "easypost_sendle_plus"), + ( + "easypost_sfexpress_international_standard_express_doc", + "easypost_sfexpress_international_standard_express_doc", + ), + ( + "easypost_sfexpress_international_standard_express_parcel", + "easypost_sfexpress_international_standard_express_parcel", + ), + ( + "easypost_sfexpress_international_economy_express_pilot", + "easypost_sfexpress_international_economy_express_pilot", + ), + ( + "easypost_sfexpress_international_economy_express_doc", + "easypost_sfexpress_international_economy_express_doc", + ), + ("easypost_speedee_delivery", "easypost_speedee_delivery"), + ("easypost_startrack_express", "easypost_startrack_express"), + ("easypost_startrack_premium", "easypost_startrack_premium"), + ( + "easypost_startrack_fixed_price_premium", + "easypost_startrack_fixed_price_premium", + ), + ( + "easypost_tforce_same_day_white_glove", + "easypost_tforce_same_day_white_glove", + ), + ( + "easypost_tforce_next_day_white_glove", + "easypost_tforce_next_day_white_glove", + ), + ("easypost_uds_delivery_service", "easypost_uds_delivery_service"), + ("easypost_ups_standard", "easypost_ups_standard"), + ("easypost_ups_saver", "easypost_ups_saver"), + ("easypost_ups_express_plus", "easypost_ups_express_plus"), + ("easypost_ups_next_day_air", "easypost_ups_next_day_air"), + ( + "easypost_ups_next_day_air_saver", + "easypost_ups_next_day_air_saver", + ), + ( + "easypost_ups_next_day_air_early_am", + "easypost_ups_next_day_air_early_am", + ), + ("easypost_ups_2nd_day_air", "easypost_ups_2nd_day_air"), + ("easypost_ups_2nd_day_air_am", "easypost_ups_2nd_day_air_am"), + ("easypost_ups_3_day_select", "easypost_ups_3_day_select"), + ( + "easypost_ups_mail_expedited_mail_innovations", + "easypost_ups_mail_expedited_mail_innovations", + ), + ( + "easypost_ups_mail_priority_mail_innovations", + "easypost_ups_mail_priority_mail_innovations", + ), + ( + "easypost_ups_mail_economy_mail_innovations", + "easypost_ups_mail_economy_mail_innovations", + ), + ("easypost_usps_library_mail", "easypost_usps_library_mail"), + ( + "easypost_usps_first_class_mail_international", + "easypost_usps_first_class_mail_international", + ), + ( + "easypost_usps_first_class_package_international_service", + "easypost_usps_first_class_package_international_service", + ), + ( + "easypost_usps_priority_mail_international", + "easypost_usps_priority_mail_international", + ), + ( + "easypost_usps_express_mail_international", + "easypost_usps_express_mail_international", + ), + ("easypost_veho_next_day", "easypost_veho_next_day"), + ("easypost_veho_same_day", "easypost_veho_same_day"), + ( + "eshipper_aramex_economy_document_express", + "eshipper_aramex_economy_document_express", + ), + ( + "eshipper_aramex_economy_parcel_express", + "eshipper_aramex_economy_parcel_express", + ), + ( + "eshipper_aramex_priority_letter_express", + "eshipper_aramex_priority_letter_express", + ), + ( + "eshipper_aramex_priority_parcel_express", + "eshipper_aramex_priority_parcel_express", + ), + ( + "eshipper_canada_post_air_parcel_intl", + "eshipper_canada_post_air_parcel_intl", + ), + ( + "eshipper_canada_post_expedited", + "eshipper_canada_post_expedited", + ), + ( + "eshipper_canada_post_expedited_parcel_usa", + "eshipper_canada_post_expedited_parcel_usa", + ), + ( + "eshipper_canada_post_priority_courier", + "eshipper_canada_post_priority_courier", + ), + ("eshipper_canada_post_regular", "eshipper_canada_post_regular"), + ( + 
"eshipper_canada_post_small_packet", + "eshipper_canada_post_small_packet", + ), + ( + "eshipper_canada_post_small_packet_international_air", + "eshipper_canada_post_small_packet_international_air", + ), + ( + "eshipper_canada_post_small_packet_international_surface", + "eshipper_canada_post_small_packet_international_surface", + ), + ( + "eshipper_canada_post_surface_parcel_intl", + "eshipper_canada_post_surface_parcel_intl", + ), + ( + "eshipper_canada_post_xpress_post", + "eshipper_canada_post_xpress_post", + ), + ( + "eshipper_canada_post_xpress_post_intl", + "eshipper_canada_post_xpress_post_intl", + ), + ( + "eshipper_canada_post_xpress_post_usa", + "eshipper_canada_post_xpress_post_usa", + ), + ( + "eshipper_canada_post_xpresspost", + "eshipper_canada_post_xpresspost", + ), + ( + "eshipper_canpar_express_letter", + "eshipper_canpar_express_letter", + ), + ("eshipper_canpar_express_pak", "eshipper_canpar_express_pak"), + ( + "eshipper_canpar_express_parcel", + "eshipper_canpar_express_parcel", + ), + ("eshipper_canpar_ground", "eshipper_canpar_ground"), + ("eshipper_canpar_international", "eshipper_canpar_international"), + ("eshipper_canpar_select_letter", "eshipper_canpar_select_letter"), + ("eshipper_canpar_select_pak", "eshipper_canpar_select_pak"), + ("eshipper_canpar_select_parcel", "eshipper_canpar_select_parcel"), + ("eshipper_canpar_usa", "eshipper_canpar_usa"), + ( + "eshipper_canpar_usa_select_letter", + "eshipper_canpar_usa_select_letter", + ), + ( + "eshipper_canpar_usa_select_pak", + "eshipper_canpar_usa_select_pak", + ), + ( + "eshipper_canpar_usa_select_parcel", + "eshipper_canpar_usa_select_parcel", + ), + ("eshipper_cpx_canada_post", "eshipper_cpx_canada_post"), + ("eshipper_day_ross_ltl", "eshipper_day_ross_ltl"), + ("eshipper_dhl_dhl_ground", "eshipper_dhl_dhl_ground"), + ("eshipper_dhl_economy_select", "eshipper_dhl_economy_select"), + ("eshipper_dhl_esi_export", "eshipper_dhl_esi_export"), + ("eshipper_dhl_express_1030am", "eshipper_dhl_express_1030am"), + ("eshipper_dhl_express_12pm", "eshipper_dhl_express_12pm"), + ("eshipper_dhl_express_900", "eshipper_dhl_express_900"), + ("eshipper_dhl_express_9am", "eshipper_dhl_express_9am"), + ("eshipper_dhl_express_envelope", "eshipper_dhl_express_envelope"), + ( + "eshipper_dhl_express_worldwide", + "eshipper_dhl_express_worldwide", + ), + ("eshipper_dhl_import_express", "eshipper_dhl_import_express"), + ( + "eshipper_dhl_import_express_12pm", + "eshipper_dhl_import_express_12pm", + ), + ( + "eshipper_dhl_import_express_9am", + "eshipper_dhl_import_express_9am", + ), + ("eshipper_ltl_apex_v", "eshipper_ltl_apex_v"), + ("eshipper_ltl_apex_trucking", "eshipper_ltl_apex_trucking"), + ("eshipper_ltl_apex_trucking_v", "eshipper_ltl_apex_trucking_v"), + ("eshipper_ltl_fastfrate_rail", "eshipper_ltl_fastfrate_rail"), + ( + "eshipper_ltl_kindersley_expedited", + "eshipper_ltl_kindersley_expedited", + ), + ("eshipper_ltl_kindersley_rail", "eshipper_ltl_kindersley_rail"), + ( + "eshipper_ltl_kindersley_regular", + "eshipper_ltl_kindersley_regular", + ), + ("eshipper_ltl_kindersley_road", "eshipper_ltl_kindersley_road"), + ("eshipper_ltl_kingsway_road", "eshipper_ltl_kingsway_road"), + ("eshipper_ltl_m_o_eastbound", "eshipper_ltl_m_o_eastbound"), + ("eshipper_ltl_mo_rail", "eshipper_ltl_mo_rail"), + ( + "eshipper_ltl_national_fastfreight_rail", + "eshipper_ltl_national_fastfreight_rail", + ), + ( + "eshipper_ltl_national_fastfreight_road", + "eshipper_ltl_national_fastfreight_road", + ), + ("eshipper_ltl_vitran_rail", 
"eshipper_ltl_vitran_rail"), + ("eshipper_ltl_vitran_road", "eshipper_ltl_vitran_road"), + ( + "eshipper_ltl_western_canada_rail", + "eshipper_ltl_western_canada_rail", + ), + ( + "eshipper_federal_express_2day_freight", + "eshipper_federal_express_2day_freight", + ), + ( + "eshipper_federal_express_3day_freight", + "eshipper_federal_express_3day_freight", + ), + ( + "eshipper_federal_express_fedex_2nd_day", + "eshipper_federal_express_fedex_2nd_day", + ), + ( + "eshipper_federal_express_fedex_economy", + "eshipper_federal_express_fedex_economy", + ), + ( + "eshipper_federal_express_fedex_first_overnight", + "eshipper_federal_express_fedex_first_overnight", + ), + ( + "eshipper_federal_express_fedex_ground", + "eshipper_federal_express_fedex_ground", + ), + ( + "eshipper_federal_express_fedex_ground_us", + "eshipper_federal_express_fedex_ground_us", + ), + ( + "eshipper_federal_express_fedex_international_priority", + "eshipper_federal_express_fedex_international_priority", + ), + ( + "eshipper_federal_express_fedex_international_priority_express", + "eshipper_federal_express_fedex_international_priority_express", + ), + ( + "eshipper_federal_express_fedex_intl_economy", + "eshipper_federal_express_fedex_intl_economy", + ), + ( + "eshipper_federal_express_fedex_intl_economy_freight", + "eshipper_federal_express_fedex_intl_economy_freight", + ), + ( + "eshipper_federal_express_fedex_intl_priority", + "eshipper_federal_express_fedex_intl_priority", + ), + ( + "eshipper_federal_express_fedex_intl_priority_express", + "eshipper_federal_express_fedex_intl_priority_express", + ), + ( + "eshipper_federal_express_fedex_intl_priority_freight", + "eshipper_federal_express_fedex_intl_priority_freight", + ), + ( + "eshipper_federal_express_fedex_priority", + "eshipper_federal_express_fedex_priority", + ), + ( + "eshipper_federal_express_fedex_standard_overnight", + "eshipper_federal_express_fedex_standard_overnight", + ), + ("eshipper_flashbird_ground", "eshipper_flashbird_ground"), + ("eshipper_fleet_optics_ground", "eshipper_fleet_optics_ground"), + ( + "eshipper_project44_a_duie_pyle", + "eshipper_project44_a_duie_pyle", + ), + ( + "eshipper_project44_aaa_cooper_transportation", + "eshipper_project44_aaa_cooper_transportation", + ), + ( + "eshipper_project44_aberdeen_express", + "eshipper_project44_aberdeen_express", + ), + ("eshipper_project44_abfs", "eshipper_project44_abfs"), + ( + "eshipper_project44_averitt_express", + "eshipper_project44_averitt_express", + ), + ( + "eshipper_project44_brown_transfer_company", + "eshipper_project44_brown_transfer_company", + ), + ( + "eshipper_project44_central_freight_lines", + "eshipper_project44_central_freight_lines", + ), + ( + "eshipper_project44_central_transport", + "eshipper_project44_central_transport", + ), + ( + "eshipper_project44_chicago_suburban_express", + "eshipper_project44_chicago_suburban_express", + ), + ( + "eshipper_project44_clear_lane_freight", + "eshipper_project44_clear_lane_freight", + ), + ( + "eshipper_project44_con_way_freight", + "eshipper_project44_con_way_freight", + ), + ( + "eshipper_project44_crosscountry_courier", + "eshipper_project44_crosscountry_courier", + ), + ("eshipper_project44_day_ross", "eshipper_project44_day_ross"), + ("eshipper_project44_day_ross_v", "eshipper_project44_day_ross_v"), + ( + "eshipper_project44_dayton_freight_lines", + "eshipper_project44_dayton_freight_lines", + ), + ( + "eshipper_project44_dependable_highway_express", + "eshipper_project44_dependable_highway_express", + ), + ( + 
"eshipper_project44_dohrn_transfer_company", + "eshipper_project44_dohrn_transfer_company", + ), + ( + "eshipper_project44_dugan_truck_line", + "eshipper_project44_dugan_truck_line", + ), + ( + "eshipper_project44_estes_express_lines", + "eshipper_project44_estes_express_lines", + ), + ( + "eshipper_project44_expedited_freight_systems", + "eshipper_project44_expedited_freight_systems", + ), + ( + "eshipper_project44_fedex_freight_canada", + "eshipper_project44_fedex_freight_canada", + ), + ( + "eshipper_project44_fedex_freight_east", + "eshipper_project44_fedex_freight_east", + ), + ( + "eshipper_project44_fedex_freight_national_canada", + "eshipper_project44_fedex_freight_national_canada", + ), + ( + "eshipper_project44_fedex_freight_national_usa", + "eshipper_project44_fedex_freight_national_usa", + ), + ( + "eshipper_project44_fedex_freight_usa", + "eshipper_project44_fedex_freight_usa", + ), + ( + "eshipper_project44_fedex_national", + "eshipper_project44_fedex_national", + ), + ("eshipper_project44_forwardair", "eshipper_project44_forwardair"), + ( + "eshipper_project44_frontline_freight", + "eshipper_project44_frontline_freight", + ), + ( + "eshipper_project44_holland_motor_express", + "eshipper_project44_holland_motor_express", + ), + ( + "eshipper_project44_lakeville_motor_express", + "eshipper_project44_lakeville_motor_express", + ), + ( + "eshipper_project44_manitoulin_tlx_inc", + "eshipper_project44_manitoulin_tlx_inc", + ), + ( + "eshipper_project44_midwest_motor_express", + "eshipper_project44_midwest_motor_express", + ), + ( + "eshipper_project44_monroe_transportation_services", + "eshipper_project44_monroe_transportation_services", + ), + ( + "eshipper_project44_n_m_transfer", + "eshipper_project44_n_m_transfer", + ), + ( + "eshipper_project44_new_england_motor_freight", + "eshipper_project44_new_england_motor_freight", + ), + ( + "eshipper_project44_new_penn_motor_express", + "eshipper_project44_new_penn_motor_express", + ), + ("eshipper_project44_pitt_ohio", "eshipper_project44_pitt_ohio"), + ("eshipper_project44_polaris", "eshipper_project44_polaris"), + ( + "eshipper_project44_purolator_freight", + "eshipper_project44_purolator_freight", + ), + ( + "eshipper_project44_rl_carriers", + "eshipper_project44_rl_carriers", + ), + ( + "eshipper_project44_roadrunner_transportation_services", + "eshipper_project44_roadrunner_transportation_services", + ), + ( + "eshipper_project44_saia_motor_freight", + "eshipper_project44_saia_motor_freight", + ), + ( + "eshipper_project44_southeastern_freight_lines", + "eshipper_project44_southeastern_freight_lines", + ), + ( + "eshipper_project44_southwestern_motor_transport", + "eshipper_project44_southwestern_motor_transport", + ), + ( + "eshipper_project44_standard_forwarding", + "eshipper_project44_standard_forwarding", + ), + ( + "eshipper_project44_total_transportation_distribution", + "eshipper_project44_total_transportation_distribution", + ), + ( + "eshipper_project44_tst_overland_express", + "eshipper_project44_tst_overland_express", + ), + ("eshipper_project44_ups", "eshipper_project44_ups"), + ( + "eshipper_project44_usf_reddaway", + "eshipper_project44_usf_reddaway", + ), + ( + "eshipper_project44_valley_cartage", + "eshipper_project44_valley_cartage", + ), + ( + "eshipper_project44_vision_express_ltl", + "eshipper_project44_vision_express_ltl", + ), + ( + "eshipper_project44_ward_trucking", + "eshipper_project44_ward_trucking", + ), + ( + "eshipper_project44_xpo_logistics", + "eshipper_project44_xpo_logistics", + ), + ( + 
"eshipper_project44_xpress_global_systems", + "eshipper_project44_xpress_global_systems", + ), + ("eshipper_project44_yrc", "eshipper_project44_yrc"), + ( + "eshipper_purolator_purolator_express", + "eshipper_purolator_purolator_express", + ), + ( + "eshipper_purolator_purolator_express_1030", + "eshipper_purolator_purolator_express_1030", + ), + ( + "eshipper_purolator_purolator_express_9am", + "eshipper_purolator_purolator_express_9am", + ), + ( + "eshipper_purolator_purolator_expresscheque", + "eshipper_purolator_purolator_expresscheque", + ), + ( + "eshipper_purolator_purolator_ground", + "eshipper_purolator_purolator_ground", + ), + ( + "eshipper_purolator_purolator_ground_1030", + "eshipper_purolator_purolator_ground_1030", + ), + ( + "eshipper_purolator_purolator_ground_9am", + "eshipper_purolator_purolator_ground_9am", + ), + ("eshipper_purolator_puroletter", "eshipper_purolator_puroletter"), + ( + "eshipper_purolator_puroletter_1030", + "eshipper_purolator_puroletter_1030", + ), + ( + "eshipper_purolator_puroletter_9am", + "eshipper_purolator_puroletter_9am", + ), + ("eshipper_purolator_puropak", "eshipper_purolator_puropak"), + ( + "eshipper_purolator_puropak_1030", + "eshipper_purolator_puropak_1030", + ), + ( + "eshipper_purolator_puropak_9am", + "eshipper_purolator_puropak_9am", + ), + ("eshipper_pyk_ground_advantage", "eshipper_pyk_ground_advantage"), + ("eshipper_pyk_priority_mail", "eshipper_pyk_priority_mail"), + ( + "eshipper_sameday_9am_guaranteed", + "eshipper_sameday_9am_guaranteed", + ), + ("eshipper_sameday_am_service", "eshipper_sameday_am_service"), + ( + "eshipper_sameday_ground_service", + "eshipper_sameday_ground_service", + ), + ( + "eshipper_sameday_h1_deliver_to_curbside", + "eshipper_sameday_h1_deliver_to_curbside", + ), + ( + "eshipper_sameday_h2_delivery_to_room_of_choice", + "eshipper_sameday_h2_delivery_to_room_of_choice", + ), + ( + "eshipper_sameday_h3_delivery_packaging_removal", + "eshipper_sameday_h3_delivery_packaging_removal", + ), + ( + "eshipper_sameday_h4_delivery_to_curbside", + "eshipper_sameday_h4_delivery_to_curbside", + ), + ( + "eshipper_sameday_h5_delivery_to_room_of_choice_2_man", + "eshipper_sameday_h5_delivery_to_room_of_choice_2_man", + ), + ( + "eshipper_sameday_h6_delivery_packaging_removal_2_man", + "eshipper_sameday_h6_delivery_packaging_removal_2_man", + ), + ("eshipper_sameday_ltl_service", "eshipper_sameday_ltl_service"), + ("eshipper_sameday_pm_service", "eshipper_sameday_pm_service"), + ( + "eshipper_sameday_urgent_letter", + "eshipper_sameday_urgent_letter", + ), + ("eshipper_sameday_urgent_pac", "eshipper_sameday_urgent_pac"), + ("eshipper_skip", "eshipper_skip"), + ( + "eshipper_smartepost_intl_dhl_parcel_international_direct_ngr", + "eshipper_smartepost_intl_dhl_parcel_international_direct_ngr", + ), + ( + "eshipper_smartepost_intl_global_mail_business_priority", + "eshipper_smartepost_intl_global_mail_business_priority", + ), + ( + "eshipper_smartepost_intl_global_mail_business_standard", + "eshipper_smartepost_intl_global_mail_business_standard", + ), + ( + "eshipper_smartepost_intl_global_mail_packet_plus_priority", + "eshipper_smartepost_intl_global_mail_packet_plus_priority", + ), + ( + "eshipper_smartepost_intl_global_mail_packet_priority", + "eshipper_smartepost_intl_global_mail_packet_priority", + ), + ( + "eshipper_smartepost_intl_global_mail_packet_standard", + "eshipper_smartepost_intl_global_mail_packet_standard", + ), + ( + "eshipper_smartepost_intl_global_mail_parcel_direct_priority_yyz", + 
"eshipper_smartepost_intl_global_mail_parcel_direct_priority_yyz", + ), + ( + "eshipper_smartepost_intl_global_mail_parcel_direct_standard_yyz", + "eshipper_smartepost_intl_global_mail_parcel_direct_standard_yyz", + ), + ( + "eshipper_smartepost_intl_global_mail_parcel_priority", + "eshipper_smartepost_intl_global_mail_parcel_priority", + ), + ( + "eshipper_smartepost_intl_global_mail_parcel_standard", + "eshipper_smartepost_intl_global_mail_parcel_standard", + ), + ("eshipper_ups_expedited", "eshipper_ups_expedited"), + ("eshipper_ups_express", "eshipper_ups_express"), + ("eshipper_ups_express_early_am", "eshipper_ups_express_early_am"), + ("eshipper_ups_ground", "eshipper_ups_ground"), + ( + "eshipper_ups_second_day_air_am", + "eshipper_ups_second_day_air_am", + ), + ("eshipper_ups_standard", "eshipper_ups_standard"), + ("eshipper_ups_three_day_select", "eshipper_ups_three_day_select"), + ("eshipper_ups_ups_saver", "eshipper_ups_ups_saver"), + ( + "eshipper_ups_worldwide_expedited", + "eshipper_ups_worldwide_expedited", + ), + ( + "eshipper_ups_worldwide_express", + "eshipper_ups_worldwide_express", + ), + ( + "eshipper_ups_worldwide_express_plus", + "eshipper_ups_worldwide_express_plus", + ), + ( + "eshipper_usps_first_class_mail", + "eshipper_usps_first_class_mail", + ), + ( + "eshipper_usps_first_class_package_return_service", + "eshipper_usps_first_class_package_return_service", + ), + ("eshipper_usps_library_mail", "eshipper_usps_library_mail"), + ("eshipper_usps_media_mail", "eshipper_usps_media_mail"), + ("eshipper_usps_parcel_select", "eshipper_usps_parcel_select"), + ("eshipper_usps_pbx", "eshipper_usps_pbx"), + ("eshipper_usps_pbx_lightweight", "eshipper_usps_pbx_lightweight"), + ("eshipper_usps_priority_mail", "eshipper_usps_priority_mail"), + ( + "eshipper_usps_priority_mail_express", + "eshipper_usps_priority_mail_express", + ), + ( + "eshipper_usps_priority_mail_open_and_distribute", + "eshipper_usps_priority_mail_open_and_distribute", + ), + ( + "eshipper_usps_priority_mail_return_service", + "eshipper_usps_priority_mail_return_service", + ), + ( + "eshipper_usps_retail_ground_formerly_standard_post", + "eshipper_usps_retail_ground_formerly_standard_post", + ), + ("eshipper_all", "eshipper_all"), + ("eshipper_fedex_priority", "eshipper_fedex_priority"), + ( + "eshipper_fedex_first_overnight", + "eshipper_fedex_first_overnight", + ), + ("eshipper_fedex_ground", "eshipper_fedex_ground"), + ( + "eshipper_fedex_standard_overnight", + "eshipper_fedex_standard_overnight", + ), + ("eshipper_fedex_2nd_day", "eshipper_fedex_2nd_day"), + ("eshipper_fedex_express_saver", "eshipper_fedex_express_saver"), + ( + "eshipper_fedex_international_economy", + "eshipper_fedex_international_economy", + ), + ("eshipper_purolator_air", "eshipper_purolator_air"), + ("eshipper_purolator_air_9_am", "eshipper_purolator_air_9_am"), + ("eshipper_purolator_air_10_30", "eshipper_purolator_air_10_30"), + ("eshipper_purolator_letter", "eshipper_purolator_letter"), + ( + "eshipper_purolator_letter_9_am", + "eshipper_purolator_letter_9_am", + ), + ( + "eshipper_purolator_letter_10_30", + "eshipper_purolator_letter_10_30", + ), + ("eshipper_purolator_pak", "eshipper_purolator_pak"), + ("eshipper_purolator_pak_9_am", "eshipper_purolator_pak_9_am"), + ("eshipper_purolator_pak_10_30", "eshipper_purolator_pak_10_30"), + ("eshipper_purolator_ground", "eshipper_purolator_ground"), + ( + "eshipper_purolator_ground_9_am", + "eshipper_purolator_ground_9_am", + ), + ( + "eshipper_purolator_ground_10_30", + 
"eshipper_purolator_ground_10_30", + ), + ( + "eshipper_canada_worldwide_same_day", + "eshipper_canada_worldwide_same_day", + ), + ( + "eshipper_canada_worldwide_next_flight_out", + "eshipper_canada_worldwide_next_flight_out", + ), + ( + "eshipper_canada_worldwide_air_freight", + "eshipper_canada_worldwide_air_freight", + ), + ("eshipper_canada_worldwide_ltl", "eshipper_canada_worldwide_ltl"), + ( + "eshipper_dhl_express_worldwide", + "eshipper_dhl_express_worldwide", + ), + ("eshipper_dhl_express_12_pm", "eshipper_dhl_express_12_pm"), + ("eshipper_dhl_express_10_30_am", "eshipper_dhl_express_10_30_am"), + ("eshipper_dhl_esi_export", "eshipper_dhl_esi_export"), + ( + "eshipper_dhl_international_express", + "eshipper_dhl_international_express", + ), + ( + "eshipper_ups_express_next_day_air", + "eshipper_ups_express_next_day_air", + ), + ( + "eshipper_ups_expedited_second_day_air", + "eshipper_ups_expedited_second_day_air", + ), + ( + "eshipper_ups_worldwide_express", + "eshipper_ups_worldwide_express", + ), + ( + "eshipper_ups_worldwide_expedited", + "eshipper_ups_worldwide_expedited", + ), + ("eshipper_ups_standard_ground", "eshipper_ups_standard_ground"), + ( + "eshipper_ups_express_early_am_next_day_air_early_am", + "eshipper_ups_express_early_am_next_day_air_early_am", + ), + ("eshipper_ups_three_day_select", "eshipper_ups_three_day_select"), + ("eshipper_ups_saver", "eshipper_ups_saver"), + ("eshipper_ups_ground", "eshipper_ups_ground"), + ("eshipper_ups_next_day_saver", "eshipper_ups_next_day_saver"), + ( + "eshipper_ups_worldwide_express_plus", + "eshipper_ups_worldwide_express_plus", + ), + ( + "eshipper_ups_second_day_air_am", + "eshipper_ups_second_day_air_am", + ), + ("eshipper_canada_post_priority", "eshipper_canada_post_priority"), + ( + "eshipper_canada_post_xpresspost", + "eshipper_canada_post_xpresspost", + ), + ( + "eshipper_canada_post_expedited", + "eshipper_canada_post_expedited", + ), + ("eshipper_canada_post_regular", "eshipper_canada_post_regular"), + ( + "eshipper_canada_post_xpresspost_usa", + "eshipper_canada_post_xpresspost_usa", + ), + ( + "eshipper_canada_post_xpresspost_intl", + "eshipper_canada_post_xpresspost_intl", + ), + ( + "eshipper_canada_post_air_parcel_intl", + "eshipper_canada_post_air_parcel_intl", + ), + ( + "eshipper_canada_post_surface_parcel_intl", + "eshipper_canada_post_surface_parcel_intl", + ), + ( + "eshipper_canada_post_expedited_parcel_usa", + "eshipper_canada_post_expedited_parcel_usa", + ), + ("eshipper_tst_ltl", "eshipper_tst_ltl"), + ( + "eshipper_ltl_chicago_suburban_express", + "eshipper_ltl_chicago_suburban_express", + ), + ( + "eshipper_ltl_fedex_freight_east", + "eshipper_ltl_fedex_freight_east", + ), + ( + "eshipper_ltl_fedex_freight_west", + "eshipper_ltl_fedex_freight_west", + ), + ( + "eshipper_ltl_mid_states_express", + "eshipper_ltl_mid_states_express", + ), + ( + "eshipper_ltl_new_england_motor_freight", + "eshipper_ltl_new_england_motor_freight", + ), + ("eshipper_ltl_new_penn", "eshipper_ltl_new_penn"), + ("eshipper_ltl_oak_harbor", "eshipper_ltl_oak_harbor"), + ("eshipper_ltl_pitt_ohio", "eshipper_ltl_pitt_ohio"), + ("eshipper_ltl_r_l_carriers", "eshipper_ltl_r_l_carriers"), + ("eshipper_ltl_saia", "eshipper_ltl_saia"), + ("eshipper_ltl_usf_reddaway", "eshipper_ltl_usf_reddaway"), + ("eshipper_ltl_vitran_express", "eshipper_ltl_vitran_express"), + ("eshipper_ltl_wilson_trucking", "eshipper_ltl_wilson_trucking"), + ( + "eshipper_ltl_yellow_transportation", + "eshipper_ltl_yellow_transportation", + ), + 
("eshipper_ltl_roadway", "eshipper_ltl_roadway"), + ("eshipper_ltl_fedex_national", "eshipper_ltl_fedex_national"), + ("eshipper_wilson_trucking_tfc", "eshipper_wilson_trucking_tfc"), + ( + "eshipper_aaa_cooper_transportation", + "eshipper_aaa_cooper_transportation", + ), + ("eshipper_roadrunner_dawes", "eshipper_roadrunner_dawes"), + ( + "eshipper_new_england_motor_freight", + "eshipper_new_england_motor_freight", + ), + ( + "eshipper_new_penn_motor_express", + "eshipper_new_penn_motor_express", + ), + ("eshipper_dayton_freight", "eshipper_dayton_freight"), + ( + "eshipper_southeastern_freightway", + "eshipper_southeastern_freightway", + ), + ("eshipper_saia_inc", "eshipper_saia_inc"), + ("eshipper_conway", "eshipper_conway"), + ("eshipper_roadway", "eshipper_roadway"), + ("eshipper_usf_reddaway", "eshipper_usf_reddaway"), + ("eshipper_usf_holland", "eshipper_usf_holland"), + ( + "eshipper_dependable_highway_express", + "eshipper_dependable_highway_express", + ), + ("eshipper_day_and_ross", "eshipper_day_and_ross"), + ("eshipper_day_and_ross_r_and_l", "eshipper_day_and_ross_r_and_l"), + ("eshipper_ups", "eshipper_ups"), + ("eshipper_aaa_cooper", "eshipper_aaa_cooper"), + ("eshipper_ama_transportation", "eshipper_ama_transportation"), + ("eshipper_averitt_express", "eshipper_averitt_express"), + ("eshipper_central_freight", "eshipper_central_freight"), + ("eshipper_conway_us", "eshipper_conway_us"), + ("eshipper_dayton", "eshipper_dayton"), + ("eshipper_drug_transport", "eshipper_drug_transport"), + ("eshipper_estes", "eshipper_estes"), + ("eshipper_land_air_express", "eshipper_land_air_express"), + ("eshipper_fedex_west", "eshipper_fedex_west"), + ("eshipper_fedex_national", "eshipper_fedex_national"), + ("eshipper_usf_holland_us", "eshipper_usf_holland_us"), + ("eshipper_lakeville_m_express", "eshipper_lakeville_m_express"), + ("eshipper_milan_express", "eshipper_milan_express"), + ("eshipper_nebraska_transport", "eshipper_nebraska_transport"), + ("eshipper_new_england", "eshipper_new_england"), + ("eshipper_new_penn", "eshipper_new_penn"), + ("eshipper_a_duie_pyle", "eshipper_a_duie_pyle"), + ("eshipper_roadway_us", "eshipper_roadway_us"), + ("eshipper_usf_reddaway_us", "eshipper_usf_reddaway_us"), + ("eshipper_rhody_transportation", "eshipper_rhody_transportation"), + ("eshipper_saia_motor_freight", "eshipper_saia_motor_freight"), + ("eshipper_southeastern_frgt", "eshipper_southeastern_frgt"), + ("eshipper_pitt_ohio", "eshipper_pitt_ohio"), + ("eshipper_ward", "eshipper_ward"), + ("eshipper_wilson", "eshipper_wilson"), + ("eshipper_chi_cargo", "eshipper_chi_cargo"), + ("eshipper_tax_air", "eshipper_tax_air"), + ("eshipper_fedex_east", "eshipper_fedex_east"), + ("eshipper_central_transport", "eshipper_central_transport"), + ("eshipper_roadrunner", "eshipper_roadrunner"), + ("eshipper_r_and_l_carriers", "eshipper_r_and_l_carriers"), + ("eshipper_estes_us", "eshipper_estes_us"), + ("eshipper_yrc_roadway", "eshipper_yrc_roadway"), + ("eshipper_central_transport_us", "eshipper_central_transport_us"), + ( + "eshipper_absolute_transportation_services", + "eshipper_absolute_transportation_services", + ), + ("eshipper_blue_sky_express", "eshipper_blue_sky_express"), + ("eshipper_galasso_trucking", "eshipper_galasso_trucking"), + ("eshipper_griley_air_freight", "eshipper_griley_air_freight"), + ("eshipper_jet_transportation", "eshipper_jet_transportation"), + ( + "eshipper_metro_transportation_logistics", + "eshipper_metro_transportation_logistics", + ), + ("eshipper_oak_harbor", 
"eshipper_oak_harbor"), + ("eshipper_stream_links_express", "eshipper_stream_links_express"), + ("eshipper_tiffany_trucking", "eshipper_tiffany_trucking"), + ("eshipper_ups_freight", "eshipper_ups_freight"), + ("eshipper_roadrunner_us", "eshipper_roadrunner_us"), + ( + "eshipper_global_mail_parcel_priority", + "eshipper_global_mail_parcel_priority", + ), + ( + "eshipper_global_mail_parcel_standard", + "eshipper_global_mail_parcel_standard", + ), + ( + "eshipper_global_mail_packet_plus_priority", + "eshipper_global_mail_packet_plus_priority", + ), + ( + "eshipper_global_mail_packet_priority", + "eshipper_global_mail_packet_priority", + ), + ( + "eshipper_global_mail_packet_standard", + "eshipper_global_mail_packet_standard", + ), + ( + "eshipper_global_mail_business_priority", + "eshipper_global_mail_business_priority", + ), + ( + "eshipper_global_mail_business_standard", + "eshipper_global_mail_business_standard", + ), + ( + "eshipper_global_mail_parcel_direct_priority", + "eshipper_global_mail_parcel_direct_priority", + ), + ( + "eshipper_global_mail_parcel_direct_standard", + "eshipper_global_mail_parcel_direct_standard", + ), + ("eshipper_canpar_ground", "eshipper_canpar_ground"), + ("eshipper_canpar_select_parcel", "eshipper_canpar_select_parcel"), + ( + "eshipper_canpar_express_parcel", + "eshipper_canpar_express_parcel", + ), + ("eshipper_fleet_optics_ground", "eshipper_fleet_optics_ground"), + ( + "fedex_international_priority_express", + "fedex_international_priority_express", + ), + ("fedex_international_first", "fedex_international_first"), + ("fedex_international_priority", "fedex_international_priority"), + ("fedex_international_economy", "fedex_international_economy"), + ("fedex_ground", "fedex_ground"), + ("fedex_cargo_mail", "fedex_cargo_mail"), + ( + "fedex_cargo_international_premium", + "fedex_cargo_international_premium", + ), + ("fedex_first_overnight", "fedex_first_overnight"), + ("fedex_first_overnight_freight", "fedex_first_overnight_freight"), + ("fedex_1_day_freight", "fedex_1_day_freight"), + ("fedex_2_day_freight", "fedex_2_day_freight"), + ("fedex_3_day_freight", "fedex_3_day_freight"), + ( + "fedex_international_priority_freight", + "fedex_international_priority_freight", + ), + ( + "fedex_international_economy_freight", + "fedex_international_economy_freight", + ), + ( + "fedex_cargo_airport_to_airport", + "fedex_cargo_airport_to_airport", + ), + ( + "fedex_international_priority_distribution", + "fedex_international_priority_distribution", + ), + ( + "fedex_ip_direct_distribution_freight", + "fedex_ip_direct_distribution_freight", + ), + ( + "fedex_intl_ground_distribution", + "fedex_intl_ground_distribution", + ), + ("fedex_ground_home_delivery", "fedex_ground_home_delivery"), + ("fedex_smart_post", "fedex_smart_post"), + ("fedex_priority_overnight", "fedex_priority_overnight"), + ("fedex_standard_overnight", "fedex_standard_overnight"), + ("fedex_2_day", "fedex_2_day"), + ("fedex_2_day_am", "fedex_2_day_am"), + ("fedex_express_saver", "fedex_express_saver"), + ("fedex_same_day", "fedex_same_day"), + ("fedex_same_day_city", "fedex_same_day_city"), + ("fedex_one_day_freight", "fedex_one_day_freight"), + ( + "fedex_international_economy_distribution", + "fedex_international_economy_distribution", + ), + ( + "fedex_international_connect_plus", + "fedex_international_connect_plus", + ), + ( + "fedex_international_distribution_freight", + "fedex_international_distribution_freight", + ), + ("fedex_regional_economy", "fedex_regional_economy"), + 
("fedex_next_day_freight", "fedex_next_day_freight"), + ("fedex_next_day", "fedex_next_day"), + ("fedex_next_day_10am", "fedex_next_day_10am"), + ("fedex_next_day_12pm", "fedex_next_day_12pm"), + ("fedex_next_day_end_of_day", "fedex_next_day_end_of_day"), + ("fedex_distance_deferred", "fedex_distance_deferred"), + ( + "fedex_europe_first_international_priority", + "fedex_europe_first_international_priority", + ), + ("fedex_1_day_freight", "fedex_1_day_freight"), + ("fedex_2_day", "fedex_2_day"), + ("fedex_2_day_am", "fedex_2_day_am"), + ("fedex_2_day_freight", "fedex_2_day_freight"), + ("fedex_3_day_freight", "fedex_3_day_freight"), + ( + "fedex_cargo_airport_to_airport", + "fedex_cargo_airport_to_airport", + ), + ( + "fedex_cargo_freight_forwarding", + "fedex_cargo_freight_forwarding", + ), + ( + "fedex_cargo_international_express_freight", + "fedex_cargo_international_express_freight", + ), + ( + "fedex_cargo_international_premium", + "fedex_cargo_international_premium", + ), + ("fedex_cargo_mail", "fedex_cargo_mail"), + ("fedex_cargo_registered_mail", "fedex_cargo_registered_mail"), + ("fedex_cargo_surface_mail", "fedex_cargo_surface_mail"), + ( + "fedex_custom_critical_air_expedite", + "fedex_custom_critical_air_expedite", + ), + ( + "fedex_custom_critical_air_expedite_exclusive_use", + "fedex_custom_critical_air_expedite_exclusive_use", + ), + ( + "fedex_custom_critical_air_expedite_network", + "fedex_custom_critical_air_expedite_network", + ), + ( + "fedex_custom_critical_charter_air", + "fedex_custom_critical_charter_air", + ), + ( + "fedex_custom_critical_point_to_point", + "fedex_custom_critical_point_to_point", + ), + ( + "fedex_custom_critical_surface_expedite", + "fedex_custom_critical_surface_expedite", + ), + ( + "fedex_custom_critical_surface_expedite_exclusive_use", + "fedex_custom_critical_surface_expedite_exclusive_use", + ), + ( + "fedex_custom_critical_temp_assure_air", + "fedex_custom_critical_temp_assure_air", + ), + ( + "fedex_custom_critical_temp_assure_validated_air", + "fedex_custom_critical_temp_assure_validated_air", + ), + ( + "fedex_custom_critical_white_glove_services", + "fedex_custom_critical_white_glove_services", + ), + ("fedex_distance_deferred", "fedex_distance_deferred"), + ("fedex_express_saver", "fedex_express_saver"), + ("fedex_first_freight", "fedex_first_freight"), + ("fedex_freight_economy", "fedex_freight_economy"), + ("fedex_freight_priority", "fedex_freight_priority"), + ("fedex_ground", "fedex_ground"), + ( + "fedex_international_priority_plus", + "fedex_international_priority_plus", + ), + ("fedex_next_day_afternoon", "fedex_next_day_afternoon"), + ("fedex_next_day_early_morning", "fedex_next_day_early_morning"), + ("fedex_next_day_end_of_day", "fedex_next_day_end_of_day"), + ("fedex_next_day_freight", "fedex_next_day_freight"), + ("fedex_next_day_mid_morning", "fedex_next_day_mid_morning"), + ("fedex_first_overnight", "fedex_first_overnight"), + ("fedex_ground_home_delivery", "fedex_ground_home_delivery"), + ( + "fedex_international_distribution_freight", + "fedex_international_distribution_freight", + ), + ("fedex_international_economy", "fedex_international_economy"), + ( + "fedex_international_economy_distribution", + "fedex_international_economy_distribution", + ), + ( + "fedex_international_economy_freight", + "fedex_international_economy_freight", + ), + ("fedex_international_first", "fedex_international_first"), + ("fedex_international_ground", "fedex_international_ground"), + ("fedex_international_priority", 
"fedex_international_priority"), + ( + "fedex_international_priority_distribution", + "fedex_international_priority_distribution", + ), + ( + "fedex_international_priority_express", + "fedex_international_priority_express", + ), + ( + "fedex_international_priority_freight", + "fedex_international_priority_freight", + ), + ("fedex_priority_overnight", "fedex_priority_overnight"), + ("fedex_same_day", "fedex_same_day"), + ("fedex_same_day_city", "fedex_same_day_city"), + ( + "fedex_same_day_metro_afternoon", + "fedex_same_day_metro_afternoon", + ), + ("fedex_same_day_metro_morning", "fedex_same_day_metro_morning"), + ("fedex_same_day_metro_rush", "fedex_same_day_metro_rush"), + ("fedex_smart_post", "fedex_smart_post"), + ("fedex_standard_overnight", "fedex_standard_overnight"), + ( + "fedex_transborder_distribution_consolidation", + "fedex_transborder_distribution_consolidation", + ), + ("freightcom_all", "freightcom_all"), + ("freightcom_usf_holland", "freightcom_usf_holland"), + ("freightcom_central_transport", "freightcom_central_transport"), + ("freightcom_estes", "freightcom_estes"), + ("freightcom_canpar_ground", "freightcom_canpar_ground"), + ("freightcom_canpar_select", "freightcom_canpar_select"), + ("freightcom_canpar_overnight", "freightcom_canpar_overnight"), + ("freightcom_dicom_ground", "freightcom_dicom_ground"), + ("freightcom_purolator_ground", "freightcom_purolator_ground"), + ("freightcom_purolator_express", "freightcom_purolator_express"), + ( + "freightcom_purolator_express_9_am", + "freightcom_purolator_express_9_am", + ), + ( + "freightcom_purolator_express_10_30_am", + "freightcom_purolator_express_10_30_am", + ), + ( + "freightcom_purolator_ground_us", + "freightcom_purolator_ground_us", + ), + ( + "freightcom_purolator_express_us", + "freightcom_purolator_express_us", + ), + ( + "freightcom_purolator_express_us_9_am", + "freightcom_purolator_express_us_9_am", + ), + ( + "freightcom_purolator_express_us_10_30_am", + "freightcom_purolator_express_us_10_30_am", + ), + ( + "freightcom_fedex_express_saver", + "freightcom_fedex_express_saver", + ), + ("freightcom_fedex_ground", "freightcom_fedex_ground"), + ("freightcom_fedex_2day", "freightcom_fedex_2day"), + ( + "freightcom_fedex_priority_overnight", + "freightcom_fedex_priority_overnight", + ), + ( + "freightcom_fedex_standard_overnight", + "freightcom_fedex_standard_overnight", + ), + ( + "freightcom_fedex_first_overnight", + "freightcom_fedex_first_overnight", + ), + ( + "freightcom_fedex_international_priority", + "freightcom_fedex_international_priority", + ), + ( + "freightcom_fedex_international_economy", + "freightcom_fedex_international_economy", + ), + ("freightcom_ups_standard", "freightcom_ups_standard"), + ("freightcom_ups_expedited", "freightcom_ups_expedited"), + ("freightcom_ups_express_saver", "freightcom_ups_express_saver"), + ("freightcom_ups_express", "freightcom_ups_express"), + ("freightcom_ups_express_early", "freightcom_ups_express_early"), + ("freightcom_ups_3day_select", "freightcom_ups_3day_select"), + ( + "freightcom_ups_worldwide_expedited", + "freightcom_ups_worldwide_expedited", + ), + ( + "freightcom_ups_worldwide_express", + "freightcom_ups_worldwide_express", + ), + ( + "freightcom_ups_worldwide_express_plus", + "freightcom_ups_worldwide_express_plus", + ), + ( + "freightcom_ups_worldwide_express_saver", + "freightcom_ups_worldwide_express_saver", + ), + ("freightcom_dhl_express_easy", "freightcom_dhl_express_easy"), + ("freightcom_dhl_express_10_30", "freightcom_dhl_express_10_30"), + 
( + "freightcom_dhl_express_worldwide", + "freightcom_dhl_express_worldwide", + ), + ("freightcom_dhl_express_12_00", "freightcom_dhl_express_12_00"), + ("freightcom_dhl_economy_select", "freightcom_dhl_economy_select"), + ( + "freightcom_dhl_ecommerce_am_service", + "freightcom_dhl_ecommerce_am_service", + ), + ( + "freightcom_dhl_ecommerce_ground_service", + "freightcom_dhl_ecommerce_ground_service", + ), + ( + "freightcom_canadapost_regular_parcel", + "freightcom_canadapost_regular_parcel", + ), + ( + "freightcom_canadapost_expedited_parcel", + "freightcom_canadapost_expedited_parcel", + ), + ( + "freightcom_canadapost_xpresspost", + "freightcom_canadapost_xpresspost", + ), + ( + "freightcom_canadapost_priority", + "freightcom_canadapost_priority", + ), + ("standard_service", "standard_service"), + ("geodis_EXP", "geodis_EXP"), + ("geodis_MES", "geodis_MES"), + ("geodis_express_france", "geodis_express_france"), + ( + "geodis_retour_trans_fr_messagerie_plus", + "geodis_retour_trans_fr_messagerie_plus", + ), + ("letter_ordered", "letter_ordered"), + ("letter_simple", "letter_simple"), + ("letter_valued", "letter_valued"), + ("package_ordered", "package_ordered"), + ("package_simple", "package_simple"), + ("package_valued", "package_valued"), + ("parcel_simple", "parcel_simple"), + ("parcel_valued", "parcel_valued"), + ("postcard_ordered", "postcard_ordered"), + ("postcard_simple", "postcard_simple"), + ("sekogram_simple", "sekogram_simple"), + ("sprint_simple", "sprint_simple"), + ("yes_ordered_value", "yes_ordered_value"), + ("locate2u_local_delivery", "locate2u_local_delivery"), + ("purolator_express_9_am", "purolator_express_9_am"), + ("purolator_express_us", "purolator_express_us"), + ("purolator_express_10_30_am", "purolator_express_10_30_am"), + ("purolator_express_us_9_am", "purolator_express_us_9_am"), + ("purolator_express_12_pm", "purolator_express_12_pm"), + ("purolator_express_us_10_30_am", "purolator_express_us_10_30_am"), + ("purolator_express", "purolator_express"), + ("purolator_express_us_12_00", "purolator_express_us_12_00"), + ("purolator_express_evening", "purolator_express_evening"), + ("purolator_express_envelope_us", "purolator_express_envelope_us"), + ( + "purolator_express_envelope_9_am", + "purolator_express_envelope_9_am", + ), + ( + "purolator_express_us_envelope_9_am", + "purolator_express_us_envelope_9_am", + ), + ( + "purolator_express_envelope_10_30_am", + "purolator_express_envelope_10_30_am", + ), + ( + "purolator_express_us_envelope_10_30_am", + "purolator_express_us_envelope_10_30_am", + ), + ( + "purolator_express_envelope_12_pm", + "purolator_express_envelope_12_pm", + ), + ( + "purolator_express_us_envelope_12_00", + "purolator_express_us_envelope_12_00", + ), + ("purolator_express_envelope", "purolator_express_envelope"), + ("purolator_express_pack_us", "purolator_express_pack_us"), + ( + "purolator_express_envelope_evening", + "purolator_express_envelope_evening", + ), + ( + "purolator_express_us_pack_9_am", + "purolator_express_us_pack_9_am", + ), + ("purolator_express_pack_9_am", "purolator_express_pack_9_am"), + ( + "purolator_express_us_pack_10_30_am", + "purolator_express_us_pack_10_30_am", + ), + ( + "purolator_express_pack10_30_am", + "purolator_express_pack10_30_am", + ), + ( + "purolator_express_us_pack_12_00", + "purolator_express_us_pack_12_00", + ), + ("purolator_express_pack_12_pm", "purolator_express_pack_12_pm"), + ("purolator_express_box_us", "purolator_express_box_us"), + ("purolator_express_pack", "purolator_express_pack"), + 
("purolator_express_us_box_9_am", "purolator_express_us_box_9_am"), + ( + "purolator_express_pack_evening", + "purolator_express_pack_evening", + ), + ( + "purolator_express_us_box_10_30_am", + "purolator_express_us_box_10_30_am", + ), + ("purolator_express_box_9_am", "purolator_express_box_9_am"), + ( + "purolator_express_us_box_12_00", + "purolator_express_us_box_12_00", + ), + ( + "purolator_express_box_10_30_am", + "purolator_express_box_10_30_am", + ), + ("purolator_ground_us", "purolator_ground_us"), + ("purolator_express_box_12_pm", "purolator_express_box_12_pm"), + ( + "purolator_express_international", + "purolator_express_international", + ), + ("purolator_express_box", "purolator_express_box"), + ( + "purolator_express_international_9_am", + "purolator_express_international_9_am", + ), + ("purolator_express_box_evening", "purolator_express_box_evening"), + ( + "purolator_express_international_10_30_am", + "purolator_express_international_10_30_am", + ), + ("purolator_ground", "purolator_ground"), + ( + "purolator_express_international_12_00", + "purolator_express_international_12_00", + ), + ("purolator_ground_9_am", "purolator_ground_9_am"), + ( + "purolator_express_envelope_international", + "purolator_express_envelope_international", + ), + ("purolator_ground_10_30_am", "purolator_ground_10_30_am"), + ( + "purolator_express_international_envelope_9_am", + "purolator_express_international_envelope_9_am", + ), + ("purolator_ground_evening", "purolator_ground_evening"), + ( + "purolator_express_international_envelope_10_30_am", + "purolator_express_international_envelope_10_30_am", + ), + ("purolator_quick_ship", "purolator_quick_ship"), + ( + "purolator_express_international_envelope_12_00", + "purolator_express_international_envelope_12_00", + ), + ("purolator_quick_ship_envelope", "purolator_quick_ship_envelope"), + ( + "purolator_express_pack_international", + "purolator_express_pack_international", + ), + ("purolator_quick_ship_pack", "purolator_quick_ship_pack"), + ( + "purolator_express_international_pack_9_am", + "purolator_express_international_pack_9_am", + ), + ("purolator_quick_ship_box", "purolator_quick_ship_box"), + ( + "purolator_express_international_pack_10_30_am", + "purolator_express_international_pack_10_30_am", + ), + ( + "purolator_express_international_pack_12_00", + "purolator_express_international_pack_12_00", + ), + ( + "purolator_express_box_international", + "purolator_express_box_international", + ), + ( + "purolator_express_international_box_9_am", + "purolator_express_international_box_9_am", + ), + ( + "purolator_express_international_box_10_30_am", + "purolator_express_international_box_10_30_am", + ), + ( + "purolator_express_international_box_12_00", + "purolator_express_international_box_12_00", + ), + ("roadie_local_delivery", "roadie_local_delivery"), + ("sendle_standard_pickup", "sendle_standard_pickup"), + ("sendle_standard_dropoff", "sendle_standard_dropoff"), + ("sendle_express_pickup", "sendle_express_pickup"), + ("tge_freight_service", "tge_freight_service"), + ("tnt_special_express", "tnt_special_express"), + ("tnt_9_00_express", "tnt_9_00_express"), + ("tnt_10_00_express", "tnt_10_00_express"), + ("tnt_12_00_express", "tnt_12_00_express"), + ("tnt_express", "tnt_express"), + ("tnt_economy_express", "tnt_economy_express"), + ("tnt_global_express", "tnt_global_express"), + ("ups_standard", "ups_standard"), + ("ups_worldwide_express", "ups_worldwide_express"), + ("ups_worldwide_expedited", "ups_worldwide_expedited"), + 
("ups_worldwide_express_plus", "ups_worldwide_express_plus"), + ("ups_worldwide_saver", "ups_worldwide_saver"), + ("ups_2nd_day_air", "ups_2nd_day_air"), + ("ups_2nd_day_air_am", "ups_2nd_day_air_am"), + ("ups_3_day_select", "ups_3_day_select"), + ("ups_ground", "ups_ground"), + ("ups_next_day_air", "ups_next_day_air"), + ("ups_next_day_air_early", "ups_next_day_air_early"), + ("ups_next_day_air_saver", "ups_next_day_air_saver"), + ("ups_expedited_ca", "ups_expedited_ca"), + ("ups_express_saver_ca", "ups_express_saver_ca"), + ("ups_3_day_select_ca_us", "ups_3_day_select_ca_us"), + ("ups_access_point_economy_ca", "ups_access_point_economy_ca"), + ("ups_express_ca", "ups_express_ca"), + ("ups_express_early_ca", "ups_express_early_ca"), + ("ups_express_saver_intl_ca", "ups_express_saver_intl_ca"), + ("ups_standard_ca", "ups_standard_ca"), + ("ups_worldwide_expedited_ca", "ups_worldwide_expedited_ca"), + ("ups_worldwide_express_ca", "ups_worldwide_express_ca"), + ("ups_worldwide_express_plus_ca", "ups_worldwide_express_plus_ca"), + ("ups_express_early_ca_us", "ups_express_early_ca_us"), + ("ups_access_point_economy_eu", "ups_access_point_economy_eu"), + ("ups_expedited_eu", "ups_expedited_eu"), + ("ups_express_eu", "ups_express_eu"), + ("ups_standard_eu", "ups_standard_eu"), + ("ups_worldwide_express_plus_eu", "ups_worldwide_express_plus_eu"), + ("ups_worldwide_saver_eu", "ups_worldwide_saver_eu"), + ("ups_access_point_economy_mx", "ups_access_point_economy_mx"), + ("ups_expedited_mx", "ups_expedited_mx"), + ("ups_express_mx", "ups_express_mx"), + ("ups_standard_mx", "ups_standard_mx"), + ("ups_worldwide_express_plus_mx", "ups_worldwide_express_plus_mx"), + ("ups_worldwide_saver_mx", "ups_worldwide_saver_mx"), + ("ups_access_point_economy_pl", "ups_access_point_economy_pl"), + ( + "ups_today_dedicated_courrier_pl", + "ups_today_dedicated_courrier_pl", + ), + ("ups_today_express_pl", "ups_today_express_pl"), + ("ups_today_express_saver_pl", "ups_today_express_saver_pl"), + ("ups_today_standard_pl", "ups_today_standard_pl"), + ("ups_expedited_pl", "ups_expedited_pl"), + ("ups_express_pl", "ups_express_pl"), + ("ups_express_plus_pl", "ups_express_plus_pl"), + ("ups_express_saver_pl", "ups_express_saver_pl"), + ("ups_standard_pl", "ups_standard_pl"), + ("ups_2nd_day_air_pr", "ups_2nd_day_air_pr"), + ("ups_ground_pr", "ups_ground_pr"), + ("ups_next_day_air_pr", "ups_next_day_air_pr"), + ("ups_next_day_air_early_pr", "ups_next_day_air_early_pr"), + ("ups_worldwide_expedited_pr", "ups_worldwide_expedited_pr"), + ("ups_worldwide_express_pr", "ups_worldwide_express_pr"), + ("ups_worldwide_express_plus_pr", "ups_worldwide_express_plus_pr"), + ("ups_worldwide_saver_pr", "ups_worldwide_saver_pr"), + ("ups_express_12_00_de", "ups_express_12_00_de"), + ("ups_worldwide_express_freight", "ups_worldwide_express_freight"), + ( + "ups_worldwide_express_freight_midday", + "ups_worldwide_express_freight_midday", + ), + ("ups_worldwide_economy_ddu", "ups_worldwide_economy_ddu"), + ("ups_worldwide_economy_ddp", "ups_worldwide_economy_ddp"), + ("usps_first_class", "usps_first_class"), + ("usps_first_class_commercial", "usps_first_class_commercial"), + ( + "usps_first_class_hfp_commercial", + "usps_first_class_hfp_commercial", + ), + ("usps_priority", "usps_priority"), + ("usps_priority_commercial", "usps_priority_commercial"), + ("usps_priority_cpp", "usps_priority_cpp"), + ("usps_priority_hfp_commercial", "usps_priority_hfp_commercial"), + ("usps_priority_hfp_cpp", "usps_priority_hfp_cpp"), + 
("usps_priority_mail_express", "usps_priority_mail_express"), + ( + "usps_priority_mail_express_commercial", + "usps_priority_mail_express_commercial", + ), + ( + "usps_priority_mail_express_cpp", + "usps_priority_mail_express_cpp", + ), + ("usps_priority_mail_express_sh", "usps_priority_mail_express_sh"), + ( + "usps_priority_mail_express_sh_commercial", + "usps_priority_mail_express_sh_commercial", + ), + ( + "usps_priority_mail_express_hfp", + "usps_priority_mail_express_hfp", + ), + ( + "usps_priority_mail_express_hfp_commercial", + "usps_priority_mail_express_hfp_commercial", + ), + ( + "usps_priority_mail_express_hfp_cpp", + "usps_priority_mail_express_hfp_cpp", + ), + ("usps_priority_mail_cubic", "usps_priority_mail_cubic"), + ("usps_retail_ground", "usps_retail_ground"), + ("usps_media", "usps_media"), + ("usps_library", "usps_library"), + ("usps_all", "usps_all"), + ("usps_online", "usps_online"), + ("usps_plus", "usps_plus"), + ("usps_bpm", "usps_bpm"), + ("usps_ground_advantage", "usps_ground_advantage"), + ( + "usps_ground_advantage_commercial", + "usps_ground_advantage_commercial", + ), + ("usps_ground_advantage_hfp", "usps_ground_advantage_hfp"), + ( + "usps_ground_advantage_hfp_commercial", + "usps_ground_advantage_hfp_commercial", + ), + ("usps_ground_advantage_cubic", "usps_ground_advantage_cubic"), + ("usps_first_class", "usps_first_class"), + ("usps_first_class_commercial", "usps_first_class_commercial"), + ( + "usps_first_class_hfp_commercial", + "usps_first_class_hfp_commercial", + ), + ("usps_priority", "usps_priority"), + ("usps_priority_commercial", "usps_priority_commercial"), + ("usps_priority_cpp", "usps_priority_cpp"), + ("usps_priority_hfp_commercial", "usps_priority_hfp_commercial"), + ("usps_priority_hfp_cpp", "usps_priority_hfp_cpp"), + ("usps_priority_mail_express", "usps_priority_mail_express"), + ( + "usps_priority_mail_express_commercial", + "usps_priority_mail_express_commercial", + ), + ( + "usps_priority_mail_express_cpp", + "usps_priority_mail_express_cpp", + ), + ("usps_priority_mail_express_sh", "usps_priority_mail_express_sh"), + ( + "usps_priority_mail_express_sh_commercial", + "usps_priority_mail_express_sh_commercial", + ), + ( + "usps_priority_mail_express_hfp", + "usps_priority_mail_express_hfp", + ), + ( + "usps_priority_mail_express_hfp_commercial", + "usps_priority_mail_express_hfp_commercial", + ), + ( + "usps_priority_mail_express_hfp_cpp", + "usps_priority_mail_express_hfp_cpp", + ), + ("usps_priority_mail_cubic", "usps_priority_mail_cubic"), + ("usps_retail_ground", "usps_retail_ground"), + ("usps_media", "usps_media"), + ("usps_library", "usps_library"), + ("usps_all", "usps_all"), + ("usps_online", "usps_online"), + ("usps_plus", "usps_plus"), + ("usps_bpm", "usps_bpm"), + ("usps_standard_service", "usps_standard_service"), + ("usps_parcel_select", "usps_parcel_select"), + ( + "usps_parcel_select_lightweight", + "usps_parcel_select_lightweight", + ), + ("usps_priority_mail_express", "usps_priority_mail_express"), + ("usps_priority_mail", "usps_priority_mail"), + ( + "usps_first_class_package_service", + "usps_first_class_package_service", + ), + ("usps_library_mail", "usps_library_mail"), + ("usps_media_mail", "usps_media_mail"), + ("usps_bound_printed_matter", "usps_bound_printed_matter"), + ("usps_connect_local", "usps_connect_local"), + ("usps_connect_mail", "usps_connect_mail"), + ("usps_connect_next_day", "usps_connect_next_day"), + ("usps_connect_regional", "usps_connect_regional"), + ("usps_connect_same_day", 
"usps_connect_same_day"), + ("usps_ground_advantage", "usps_ground_advantage"), + ("usps_retail_ground", "usps_retail_ground"), + ("usps_all", "usps_all"), + ("usps_standard_service", "usps_standard_service"), + ("usps_parcel_select", "usps_parcel_select"), + ( + "usps_parcel_select_lightweight", + "usps_parcel_select_lightweight", + ), + ("usps_priority_mail_express", "usps_priority_mail_express"), + ("usps_priority_mail", "usps_priority_mail"), + ( + "usps_first_class_package_service", + "usps_first_class_package_service", + ), + ("usps_library_mail", "usps_library_mail"), + ("usps_media_mail", "usps_media_mail"), + ("usps_bound_printed_matter", "usps_bound_printed_matter"), + ("usps_connect_local", "usps_connect_local"), + ("usps_connect_mail", "usps_connect_mail"), + ("usps_connect_next_day", "usps_connect_next_day"), + ("usps_connect_regional", "usps_connect_regional"), + ("usps_connect_same_day", "usps_connect_same_day"), + ("usps_ground_advantage", "usps_ground_advantage"), + ("usps_retail_ground", "usps_retail_ground"), + ("usps_all", "usps_all"), + ("zoom2u_VIP", "zoom2u_VIP"), + ("zoom2u_3_hour", "zoom2u_3_hour"), + ("zoom2u_same_day", "zoom2u_same_day"), + ], + help_text="\n The list of services you want to apply the surcharge to.\n
\n Note that by default, the surcharge is applied to all services\n ", + null=True, + ), + ), + ] diff --git a/requirements.build.txt b/requirements.build.txt index 49779f97c6..855e242352 100644 --- a/requirements.build.txt +++ b/requirements.build.txt @@ -39,9 +39,12 @@ Django==4.2.14 -e ./modules/connectors/ups -e ./modules/connectors/usps -e ./modules/connectors/usps_international +-e ./modules/connectors/usps_rest +-e ./modules/connectors/usps_rest_international -e ./modules/connectors/easypost -e ./modules/connectors/eshipper +-e ./modules/connectors/eshipper_xml -e ./modules/connectors/freightcom -e ./modules/connectors/locate2u -e ./modules/connectors/zoom2u diff --git a/requirements.sdk.dev.txt b/requirements.sdk.dev.txt index bd60c51ec3..291b1f9c50 100644 --- a/requirements.sdk.dev.txt +++ b/requirements.sdk.dev.txt @@ -35,10 +35,13 @@ -e ./modules/connectors/ups -e ./modules/connectors/usps -e ./modules/connectors/usps_international +-e ./modules/connectors/usps_rest +-e ./modules/connectors/usps_rest_international # Carrier Hub Extentions packages -e ./modules/connectors/easypost -e ./modules/connectors/eshipper +-e ./modules/connectors/eshipper_xml -e ./modules/connectors/freightcom -e ./modules/connectors/locate2u -e ./modules/connectors/zoom2u diff --git a/requirements.server.dev.txt b/requirements.server.dev.txt index dcd592d7a7..1f22ba42b5 100644 --- a/requirements.server.dev.txt +++ b/requirements.server.dev.txt @@ -39,9 +39,12 @@ Django==4.2.14 -e ./modules/connectors/ups -e ./modules/connectors/usps -e ./modules/connectors/usps_international +-e ./modules/connectors/usps_rest +-e ./modules/connectors/usps_rest_international -e ./modules/connectors/easypost -e ./modules/connectors/eshipper +-e ./modules/connectors/eshipper_xml -e ./modules/connectors/freightcom -e ./modules/connectors/locate2u -e ./modules/connectors/zoom2u diff --git a/source.requirements.txt b/source.requirements.txt index 4622c31ca7..1c53264a53 100644 --- a/source.requirements.txt +++ b/source.requirements.txt @@ -68,6 +68,7 @@ karrio.dpd @ file://${PWD}/modules/connectors/dpd karrio.dpdhl @ file://${PWD}/modules/connectors/dpdhl karrio.easypost @ file://${PWD}/modules/connectors/easypost karrio.eshipper @ file://${PWD}/modules/connectors/eshipper +karrio.eshipper_xml @ file://${PWD}/modules/connectors/eshipper_xml karrio.fedex @ file://${PWD}/modules/connectors/fedex karrio.fedex_ws @ file://${PWD}/modules/connectors/fedex_ws karrio.freightcom @ file://${PWD}/modules/connectors/freightcom @@ -98,6 +99,8 @@ karrio.tnt @ file://${PWD}/modules/connectors/tnt karrio.ups @ file://${PWD}/modules/connectors/ups karrio.usps @ file://${PWD}/modules/connectors/usps karrio.usps-international @ file://${PWD}/modules/connectors/usps_international +karrio.usps-rest @ file://${PWD}/modules/connectors/usps_rest +karrio.usps-rest-international @ file://${PWD}/modules/connectors/usps_rest_international karrio.zoom2u @ file://${PWD}/modules/connectors/zoom2u lxml==4.9.4 lxml-stubs==0.5.1